Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
ci: Update postgres tests to use test containers
Signed-off-by: Achal Shah <achals@gmail.com>
  • Loading branch information
achals committed May 9, 2022
commit 53a01706c23b2844eaafd58d891a3a36e3680247
1 change: 1 addition & 0 deletions CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -178,6 +178,7 @@ The services with containerized replacements currently implemented are:
- Redis
- Trino
- HBase
- Postgres

You can run `make test-python-integration-container` to run tests against the containerized versions of dependencies.

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
PostgreSQLDataSourceCreator,
)


FULL_REPO_CONFIGS = [
IntegrationTestRepoConfig(
provider="local",
Expand Down
14 changes: 8 additions & 6 deletions sdk/python/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -217,16 +217,19 @@ def get_singleton(cls):
if not cls.is_running:
cls.container = (
DockerContainer("postgres:latest")
.with_exposed_ports(5432)
.with_env("POSTGRES_USER", cls.postgres_user)
.with_env("POSTGRES_PASSWORD", cls.postgres_password)
.with_env("POSTGRES_DB", cls.postgres_db)
.with_exposed_ports(5432)
.with_env("POSTGRES_USER", cls.postgres_user)
.with_env("POSTGRES_PASSWORD", cls.postgres_password)
.with_env("POSTGRES_DB", cls.postgres_db)
)

cls.container.start()
log_string_to_wait_for = "database system is ready to accept connections"
waited = wait_for_logs(
container=cls.container, predicate=log_string_to_wait_for, timeout=30, interval=10
container=cls.container,
predicate=log_string_to_wait_for,
timeout=30,
interval=10,
)
logger.info("Waited for %s seconds until postgres container was up", waited)
cls.is_running = True
Expand All @@ -247,7 +250,6 @@ def teardown():
return PostgresContainerSingleton



@pytest.fixture(
params=FULL_REPO_CONFIGS, scope="session", ids=[str(c) for c in FULL_REPO_CONFIGS]
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,9 @@
from tests.integration.feature_repos.universal.data_source_creator import (
DataSourceCreator,
)
from tests.integration.feature_repos.universal.online_store_creator import OnlineStoreCreator
from tests.integration.feature_repos.universal.online_store_creator import (
OnlineStoreCreator,
)

logger = logging.getLogger(__name__)

Expand All @@ -38,24 +40,27 @@ def initialize(cls, project_name: str, *args, **kwargs):
cls.project_name = project_name

if "offline_container" not in kwargs or not kwargs.get(
"offline_container", None
"offline_container", None
):
# If no offline container is provided, we try to create one on the fly.
# The problem here is that each test creates its own container, which
# effectively browns out developer laptops.
cls.container = (
DockerContainer("postgres:latest")
.with_exposed_ports(5432)
.with_env("POSTGRES_USER", cls.postgres_user)
.with_env("POSTGRES_PASSWORD", cls.postgres_password)
.with_env("POSTGRES_DB", cls.postgres_db)
.with_exposed_ports(5432)
.with_env("POSTGRES_USER", cls.postgres_user)
.with_env("POSTGRES_PASSWORD", cls.postgres_password)
.with_env("POSTGRES_DB", cls.postgres_db)
)

cls.container.start()
cls.provided_container = False
log_string_to_wait_for = "database system is ready to accept connections"
waited = wait_for_logs(
container=cls.container, predicate=log_string_to_wait_for, timeout=30, interval=10
container=cls.container,
predicate=log_string_to_wait_for,
timeout=30,
interval=10,
)
logger.info("Waited for %s seconds until postgres container was up", waited)
cls.running = True
Expand All @@ -75,18 +80,19 @@ def initialize(cls, project_name: str, *args, **kwargs):

@classmethod
def create_data_source(
cls,
df: pd.DataFrame,
destination_name: str,
suffix: Optional[str] = None,
timestamp_field="ts",
created_timestamp_column="created_ts",
field_mapping: Dict[str, str] = None,
cls,
df: pd.DataFrame,
destination_name: str,
suffix: Optional[str] = None,
timestamp_field="ts",
created_timestamp_column="created_ts",
field_mapping: Dict[str, str] = None,
) -> DataSource:

destination_name = cls.get_prefixed_table_name(destination_name)

df_to_postgres_table(cls.offline_store_config, df, destination_name)
if cls.offline_store_config:
df_to_postgres_table(cls.offline_store_config, df, destination_name)

return PostgreSQLSource(
name=destination_name,
Expand All @@ -97,7 +103,8 @@ def create_data_source(
)

@classmethod
def create_offline_store_config(cls) -> FeastConfigBaseModel:
def create_offline_store_config(cls) -> PostgreSQLOfflineStoreConfig:
assert cls.offline_store_config
return cls.offline_store_config

@classmethod
Expand All @@ -106,6 +113,7 @@ def get_prefixed_table_name(cls, suffix: str) -> str:

@classmethod
def create_online_store(cls) -> Dict[str, str]:
assert cls.container
return {
"type": "postgres",
"host": "localhost",
Expand Down Expand Up @@ -152,7 +160,14 @@ def create_data_source(
field_mapping: Dict[str, str] = None,
) -> DataSource:

return PostgresSourceCreatorSingleton.create_data_source(df, destination_name, suffix, timestamp_field, created_timestamp_column, field_mapping)
return PostgresSourceCreatorSingleton.create_data_source(
df,
destination_name,
suffix,
timestamp_field,
created_timestamp_column,
field_mapping,
)

def create_offline_store_config(self) -> FeastConfigBaseModel:
return PostgresSourceCreatorSingleton.create_offline_store_config()
Expand Down