pre cleanup commit
Signed-off-by: alex.eijssen <alex.eijssen@energyessentials.nl>
alex.eijssen committed Aug 11, 2022
commit a76abef00ee5af4445bbabea2a6d9590971d6ab3
Makefile: 3 changes (1 addition, 2 deletions)
@@ -171,8 +171,7 @@ test-python-universal-postgres-offline:
 	FEAST_USAGE=False \
 	IS_TEST=True \
 	python -m pytest -n 8 --integration \
-	-k "not test_historical_features and \
-	not test_historical_retrieval_with_validation and \
+	-k "not test_historical_retrieval_with_validation and \
 	not test_historical_features_persisting and \
 	not test_universal_cli and \
 	not test_go_feature_server and \
@@ -118,14 +118,16 @@ def get_historical_features(
         full_feature_names: bool = False,
     ) -> RetrievalJob:

-        (
-            entity_schema,
-            df_query,
-            table_name,
-        ) = _get_entity_schema_df_query_and_table_name(
-            config=config,
-            entity_df=entity_df,
-        )
+        # (
+        #     entity_schema,
+        #     df_query,
+        #     table_name,
+        # ) = _get_entity_schema_df_query_and_table_name(
+        #     config=config,
+        #     entity_df=entity_df,
+        # )
+
+        entity_schema = _get_entity_schema(entity_df, config)

         entity_df_event_timestamp_col = (
             offline_utils.infer_event_timestamp_from_entity_df(entity_schema)
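The replacement helper returns a plain column-to-dtype mapping, which is all that offline_utils.infer_event_timestamp_from_entity_df needs here. A minimal sketch of the DataFrame branch, using a hypothetical entity dataframe (column names are illustrative only):

import pandas as pd

# Hypothetical entity dataframe for illustration.
entity_df = pd.DataFrame(
    {
        "driver_id": [1001, 1002],
        "event_timestamp": pd.to_datetime(["2022-08-01", "2022-08-02"]),
    }
)

# Equivalent to the DataFrame branch of _get_entity_schema (added further
# down in this diff): a dict mapping column name -> numpy dtype.
entity_schema = dict(zip(entity_df.columns, entity_df.dtypes))
print(entity_schema)
# {'driver_id': dtype('int64'), 'event_timestamp': dtype('<M8[ns]')}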
@@ -135,11 +137,13 @@ def get_historical_features(
             entity_df,
             entity_df_event_timestamp_col,
             config,
-            df_query,
         )

         @contextlib.contextmanager
         def query_generator() -> Iterator[str]:
+            table_name = offline_utils.get_temp_entity_table_name()
+
+            _upload_entity_df(config, entity_df, table_name)

             expected_join_keys = offline_utils.get_expected_join_keys(
                 project, feature_views, registry
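This hunk moves the temp-table work inside the query generator, so the entity dataframe is only materialized in Postgres when the point-in-time query is actually rendered, not when get_historical_features() is called. A sketch of that deferred-upload pattern under stated assumptions: upload and drop stand in for _upload_entity_df and cleanup logic, which this hunk does not show, and the fixed table name stands in for offline_utils.get_temp_entity_table_name().

import contextlib
from typing import Callable, Iterator

@contextlib.contextmanager
def lazy_entity_table(
    upload: Callable[[str], None], drop: Callable[[str], None]
) -> Iterator[str]:
    table_name = "feast_entity_df_tmp"  # illustrative fixed name
    upload(table_name)  # cf. _upload_entity_df(config, entity_df, table_name)
    try:
        # The real generator yields the rendered point-in-time SQL that
        # joins features against this table.
        yield table_name
    finally:
        drop(table_name)  # cleanup is assumed, not shown in this hunk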
@@ -168,7 +172,7 @@ def query_generator() -> Iterator[str]:
             try:
                 yield build_point_in_time_query(
                     query_context_dict,
-                    left_table_query_string=df_query,
+                    left_table_query_string=table_name,
                     entity_df_event_timestamp_col=entity_df_event_timestamp_col,
                     entity_df_columns=entity_schema.keys(),
                     query_template=MULTIPLE_FEATURE_VIEW_POINT_IN_TIME_JOIN,
@@ -316,7 +320,6 @@ def _get_entity_df_event_timestamp_range(
     entity_df: Union[pd.DataFrame, str],
     entity_df_event_timestamp_col: str,
     config: RepoConfig,
-    table_name: str,
 ) -> Tuple[datetime, datetime]:
     if isinstance(entity_df, pd.DataFrame):
         entity_df_event_timestamp = entity_df.loc[
@@ -327,15 +330,15 @@ def _get_entity_df_event_timestamp_range(
             entity_df_event_timestamp, utc=True
         )
         entity_df_event_timestamp_range = (
-            entity_df_event_timestamp.min(),
-            entity_df_event_timestamp.max(),
+            entity_df_event_timestamp.min().to_pydatetime(),
+            entity_df_event_timestamp.max().to_pydatetime(),
         )
     elif isinstance(entity_df, str):
         # If the entity_df is a string (SQL query), determine range
         # from table
         with _get_conn(config.offline_store) as conn, conn.cursor() as cur:
             cur.execute(
-                f"SELECT MIN({entity_df_event_timestamp_col}) AS min, MAX({entity_df_event_timestamp_col}) AS max FROM {table_name}"
+                f"SELECT MIN({entity_df_event_timestamp_col}) AS min, MAX({entity_df_event_timestamp_col}) AS max FROM ({entity_df}) as tmp_alias"
             )
             res = cur.fetchone()
             entity_df_event_timestamp_range = (res[0], res[1])
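Two changes in this hunk: the range endpoints are normalized from pandas Timestamps to plain datetime objects via to_pydatetime(), and the MIN/MAX scan now wraps the user's SQL directly as a derived table instead of reading a pre-materialized temp table. A standalone sketch of the subquery form; the connection string and query are illustrative, and the real code goes through _get_conn(config.offline_store):

import psycopg2

entity_df = "SELECT driver_id, event_timestamp FROM drivers"  # hypothetical user query
ts_col = "event_timestamp"

with psycopg2.connect("dbname=feast") as conn, conn.cursor() as cur:
    # Wrap the user's SQL as a derived table; no temp table is needed
    # just to compute the timestamp bounds.
    cur.execute(
        f"SELECT MIN({ts_col}) AS min, MAX({ts_col}) AS max "
        f"FROM ({entity_df}) AS tmp_alias"
    )
    res = cur.fetchone()
    start, end = res[0], res[1]  # range endpoints as reported by the driver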
@@ -389,6 +392,34 @@ def build_point_in_time_query(
     return query


+def _upload_entity_df(
+    config: RepoConfig, entity_df: Union[pd.DataFrame, str], table_name: str
+):
+    if isinstance(entity_df, pd.DataFrame):
+        # If the entity_df is a pandas dataframe, upload it to Postgres
+        df_to_postgres_table(config.offline_store, entity_df, table_name)
+    elif isinstance(entity_df, str):
+        # If the entity_df is a string (SQL query), create a Postgres table out of it
+        with _get_conn(config.offline_store) as conn, conn.cursor() as cur:
+            cur.execute(f"CREATE TABLE {table_name} AS ({entity_df})")
+    else:
+        raise InvalidEntityType(type(entity_df))
+
+
+def _get_entity_schema(
+    entity_df: Union[pd.DataFrame, str],
+    config: RepoConfig,
+) -> Dict[str, np.dtype]:
+    if isinstance(entity_df, pd.DataFrame):
+        return dict(zip(entity_df.columns, entity_df.dtypes))
+
+    elif isinstance(entity_df, str):
+        df_query = f"({entity_df}) AS sub"
+        return get_query_schema(config.offline_store, df_query)
+    else:
+        raise InvalidEntityType(type(entity_df))


 def _get_entity_schema_df_query_and_table_name(
     config: RepoConfig, entity_df: Union[pd.DataFrame, str]
 ) -> Tuple[Dict[str, np.dtype], str, Optional[str]]:
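For SQL-string input, _get_entity_schema defers to get_query_schema. One common way to infer column dtypes from an arbitrary query (shown purely as an illustration; get_query_schema's actual implementation is not part of this diff) is a zero-row fetch through pandas:

import pandas as pd
import psycopg2

entity_sql = "SELECT driver_id, event_timestamp FROM drivers"  # hypothetical query

with psycopg2.connect("dbname=feast") as conn:
    # LIMIT 0 returns no rows but still yields fully typed columns.
    empty = pd.read_sql(f"SELECT * FROM ({entity_sql}) AS sub LIMIT 0", conn)

schema = dict(zip(empty.columns, empty.dtypes))  # column name -> numpy dtype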