Skip to content

Commit 640ff12

Browse files
authored
ci: Update postgres tests to use testcontainers (#2650)
* ci: Add a postgres testcontainer for tests Signed-off-by: Achal Shah <achals@gmail.com> * ci: Update postgres tests to use test containers Signed-off-by: Achal Shah <achals@gmail.com> * ci: Update postgres tests to use test containers Signed-off-by: Achal Shah <achals@gmail.com>
1 parent 5b4b07f commit 640ff12

File tree

7 files changed

+199
-38
lines changed

7 files changed

+199
-38
lines changed

CONTRIBUTING.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -178,6 +178,7 @@ The services with containerized replacements currently implemented are:
178178
- Redis
179179
- Trino
180180
- HBase
181+
- Postgres
181182

182183
You can run `make test-python-integration-container` to run tests against the containerized versions of dependencies.
183184

Makefile

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -79,12 +79,15 @@ test-python-universal-postgres:
7979
FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.offline_stores.contrib.postgres_repo_configuration \
8080
FEAST_USAGE=False \
8181
IS_TEST=True \
82-
python -m pytest --integration --universal \
82+
python -m pytest -x --integration --universal \
8383
-k "not test_historical_retrieval_fails_on_validation and \
8484
not test_historical_retrieval_with_validation and \
8585
not test_historical_features_persisting and \
8686
not test_historical_retrieval_fails_on_validation and \
87-
not test_universal_cli" \
87+
not test_universal_cli and \
88+
not test_go_feature_server and \
89+
not test_feature_logging and \
90+
not test_universal_types" \
8891
sdk/python/tests
8992

9093
test-python-universal-local:

sdk/python/feast/infra/offline_stores/contrib/postgres_repo_configuration.py

Lines changed: 1 addition & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -5,20 +5,10 @@
55
PostgreSQLDataSourceCreator,
66
)
77

8-
POSTGRES_ONLINE_CONFIG = {
9-
"type": "postgres",
10-
"host": "localhost",
11-
"port": "5432",
12-
"database": "postgres",
13-
"db_schema": "feature_store",
14-
"user": "postgres",
15-
"password": "docker",
16-
}
17-
188
FULL_REPO_CONFIGS = [
199
IntegrationTestRepoConfig(
2010
provider="local",
2111
offline_store_creator=PostgreSQLDataSourceCreator,
22-
online_store=POSTGRES_ONLINE_CONFIG,
12+
online_store_creator=PostgreSQLDataSourceCreator,
2313
),
2414
]

sdk/python/feast/infra/online_stores/contrib/postgres.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,7 @@ def online_write_batch(
6464
created_ts,
6565
)
6666
)
67-
# Controll the batch so that we can update the progress
67+
# Control the batch so that we can update the progress
6868
batch_size = 5000
6969
for i in range(0, len(insert_values), batch_size):
7070
cur_batch = insert_values[i : i + batch_size]

sdk/python/tests/conftest.py

Lines changed: 53 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -204,16 +204,68 @@ def teardown():
204204
return TrinoContainerSingleton
205205

206206

207+
class PostgresContainerSingleton:
208+
container = None
209+
is_running = False
210+
211+
postgres_user = "test"
212+
postgres_password = "test"
213+
postgres_db = "test"
214+
215+
@classmethod
216+
def get_singleton(cls):
217+
if not cls.is_running:
218+
cls.container = (
219+
DockerContainer("postgres:latest")
220+
.with_exposed_ports(5432)
221+
.with_env("POSTGRES_USER", cls.postgres_user)
222+
.with_env("POSTGRES_PASSWORD", cls.postgres_password)
223+
.with_env("POSTGRES_DB", cls.postgres_db)
224+
)
225+
226+
cls.container.start()
227+
log_string_to_wait_for = "database system is ready to accept connections"
228+
waited = wait_for_logs(
229+
container=cls.container,
230+
predicate=log_string_to_wait_for,
231+
timeout=30,
232+
interval=10,
233+
)
234+
logger.info("Waited for %s seconds until postgres container was up", waited)
235+
cls.is_running = True
236+
return cls.container
237+
238+
@classmethod
239+
def teardown(cls):
240+
if cls.container:
241+
cls.container.stop()
242+
243+
244+
@pytest.fixture(scope="session")
245+
def postgres_fixture(request):
246+
def teardown():
247+
PostgresContainerSingleton.teardown()
248+
249+
request.addfinalizer(teardown)
250+
return PostgresContainerSingleton
251+
252+
207253
@pytest.fixture(
208254
params=FULL_REPO_CONFIGS, scope="session", ids=[str(c) for c in FULL_REPO_CONFIGS]
209255
)
210-
def environment(request, worker_id: str, trino_fixture):
256+
def environment(request, worker_id: str, trino_fixture, postgres_fixture):
211257
if "TrinoSourceCreator" in request.param.offline_store_creator.__name__:
212258
e = construct_test_environment(
213259
request.param,
214260
worker_id=worker_id,
215261
offline_container=trino_fixture.get_singleton(),
216262
)
263+
elif "PostgresSourceCreator" in request.param.offline_store_creator.__name__:
264+
e = construct_test_environment(
265+
request.param,
266+
worker_id=worker_id,
267+
offline_container=postgres_fixture.get_singleton(),
268+
)
217269
else:
218270
e = construct_test_environment(request.param, worker_id=worker_id)
219271
proc = Process(
Lines changed: 137 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -1,38 +1,86 @@
1-
from typing import Dict, List, Optional
1+
import logging
2+
from typing import Dict, Optional
23

34
import pandas as pd
5+
from testcontainers.core.container import DockerContainer
6+
from testcontainers.core.waiting_utils import wait_for_logs
47

58
from feast.data_source import DataSource
69
from feast.infra.offline_stores.contrib.postgres_offline_store.postgres import (
710
PostgreSQLOfflineStoreConfig,
811
PostgreSQLSource,
912
)
10-
from feast.infra.utils.postgres.connection_utils import _get_conn, df_to_postgres_table
13+
from feast.infra.utils.postgres.connection_utils import df_to_postgres_table
1114
from feast.repo_config import FeastConfigBaseModel
1215
from tests.integration.feature_repos.universal.data_source_creator import (
1316
DataSourceCreator,
1417
)
18+
from tests.integration.feature_repos.universal.online_store_creator import (
19+
OnlineStoreCreator,
20+
)
1521

22+
logger = logging.getLogger(__name__)
1623

17-
class PostgreSQLDataSourceCreator(DataSourceCreator):
18-
tables: List[str] = []
1924

20-
def __init__(self, project_name: str, *args, **kwargs):
21-
super().__init__(project_name)
22-
self.project_name = project_name
25+
class PostgresSourceCreatorSingleton:
26+
postgres_user = "test"
27+
postgres_password = "test"
28+
postgres_db = "test"
29+
30+
running = False
31+
32+
project_name = None
33+
container = None
34+
provided_container = None
2335

24-
self.offline_store_config = PostgreSQLOfflineStoreConfig(
36+
offline_store_config = None
37+
38+
@classmethod
39+
def initialize(cls, project_name: str, *args, **kwargs):
40+
cls.project_name = project_name
41+
42+
if "offline_container" not in kwargs or not kwargs.get(
43+
"offline_container", None
44+
):
45+
# If we don't get an offline container provided, we try to create it on the fly.
46+
# The problem here is that each test creates its own container, which basically
47+
# browns out developer laptops.
48+
cls.container = (
49+
DockerContainer("postgres:latest")
50+
.with_exposed_ports(5432)
51+
.with_env("POSTGRES_USER", cls.postgres_user)
52+
.with_env("POSTGRES_PASSWORD", cls.postgres_password)
53+
.with_env("POSTGRES_DB", cls.postgres_db)
54+
)
55+
56+
cls.container.start()
57+
cls.provided_container = False
58+
log_string_to_wait_for = "database system is ready to accept connections"
59+
waited = wait_for_logs(
60+
container=cls.container,
61+
predicate=log_string_to_wait_for,
62+
timeout=30,
63+
interval=10,
64+
)
65+
logger.info("Waited for %s seconds until postgres container was up", waited)
66+
cls.running = True
67+
else:
68+
cls.provided_container = True
69+
cls.container = kwargs["offline_container"]
70+
71+
cls.offline_store_config = PostgreSQLOfflineStoreConfig(
2572
type="postgres",
2673
host="localhost",
27-
port=5432,
28-
database="postgres",
74+
port=cls.container.get_exposed_port(5432),
75+
database=cls.container.env["POSTGRES_DB"],
2976
db_schema="public",
30-
user="postgres",
31-
password="docker",
77+
user=cls.container.env["POSTGRES_USER"],
78+
password=cls.container.env["POSTGRES_PASSWORD"],
3279
)
3380

81+
@classmethod
3482
def create_data_source(
35-
self,
83+
cls,
3684
df: pd.DataFrame,
3785
destination_name: str,
3886
suffix: Optional[str] = None,
@@ -41,11 +89,10 @@ def create_data_source(
4189
field_mapping: Dict[str, str] = None,
4290
) -> DataSource:
4391

44-
destination_name = self.get_prefixed_table_name(destination_name)
92+
destination_name = cls.get_prefixed_table_name(destination_name)
4593

46-
df_to_postgres_table(self.offline_store_config, df, destination_name)
47-
48-
self.tables.append(destination_name)
94+
if cls.offline_store_config:
95+
df_to_postgres_table(cls.offline_store_config, df, destination_name)
4996

5097
return PostgreSQLSource(
5198
name=destination_name,
@@ -55,17 +102,85 @@ def create_data_source(
55102
field_mapping=field_mapping or {"ts_1": "ts"},
56103
)
57104

105+
@classmethod
106+
def create_offline_store_config(cls) -> PostgreSQLOfflineStoreConfig:
107+
assert cls.offline_store_config
108+
return cls.offline_store_config
109+
110+
@classmethod
111+
def get_prefixed_table_name(cls, suffix: str) -> str:
112+
return f"{cls.project_name}_{suffix}"
113+
114+
@classmethod
115+
def create_online_store(cls) -> Dict[str, str]:
116+
assert cls.container
117+
return {
118+
"type": "postgres",
119+
"host": "localhost",
120+
"port": cls.container.get_exposed_port(5432),
121+
"database": cls.postgres_db,
122+
"db_schema": "feature_store",
123+
"user": cls.postgres_user,
124+
"password": cls.postgres_password,
125+
}
126+
127+
@classmethod
128+
def create_saved_dataset_destination(cls):
129+
# FIXME: ...
130+
return None
131+
132+
@classmethod
133+
def teardown(cls):
134+
if not cls.provided_container and cls.running:
135+
cls.container.stop()
136+
cls.running = False
137+
cls.container = None
138+
cls.project = None
139+
140+
141+
class PostgreSQLDataSourceCreator(DataSourceCreator, OnlineStoreCreator):
142+
143+
postgres_user = "test"
144+
postgres_password = "test"
145+
postgres_db = "test"
146+
147+
running = False
148+
149+
def __init__(self, project_name: str, *args, **kwargs):
150+
super().__init__(project_name)
151+
PostgresSourceCreatorSingleton.initialize(project_name, args, kwargs)
152+
153+
def create_data_source(
154+
self,
155+
df: pd.DataFrame,
156+
destination_name: str,
157+
suffix: Optional[str] = None,
158+
timestamp_field="ts",
159+
created_timestamp_column="created_ts",
160+
field_mapping: Dict[str, str] = None,
161+
) -> DataSource:
162+
163+
return PostgresSourceCreatorSingleton.create_data_source(
164+
df,
165+
destination_name,
166+
suffix,
167+
timestamp_field,
168+
created_timestamp_column,
169+
field_mapping,
170+
)
171+
58172
def create_offline_store_config(self) -> FeastConfigBaseModel:
59-
return self.offline_store_config
173+
return PostgresSourceCreatorSingleton.create_offline_store_config()
60174

61175
def get_prefixed_table_name(self, suffix: str) -> str:
62-
return f"{self.project_name}_{suffix}"
176+
return PostgresSourceCreatorSingleton.get_prefixed_table_name(suffix)
177+
178+
def create_online_store(self) -> Dict[str, str]:
179+
return PostgresSourceCreatorSingleton.create_online_store()
63180

64181
def create_saved_dataset_destination(self):
65182
# FIXME: ...
66183
return None
67184

68185
def teardown(self):
69-
with _get_conn(self.offline_store_config) as conn, conn.cursor() as cur:
70-
for table in self.tables:
71-
cur.execute("DROP TABLE IF EXISTS " + table)
186+
PostgresSourceCreatorSingleton.teardown()

setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -150,7 +150,7 @@
150150
"pytest-mock==1.10.4",
151151
"Sphinx!=4.0.0,<4.4.0",
152152
"sphinx-rtd-theme",
153-
"testcontainers>=3.5",
153+
"testcontainers[postgresql]>=3.5",
154154
"adlfs==0.5.9",
155155
"firebase-admin==4.5.2",
156156
"pre-commit",

0 commit comments

Comments
 (0)