Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
51 commits
Select commit Hold shift + click to select a range
e1e210d
Broken state
kevjumba Aug 5, 2022
011d1e0
working state
kevjumba Aug 10, 2022
a6a2fce
Fix the lint issues
kevjumba Aug 10, 2022
57b63bb
Semi working state
kevjumba Aug 10, 2022
ae7ed8a
Fix
kevjumba Aug 10, 2022
421645b
Remove print
kevjumba Aug 10, 2022
07fece5
Fix lint
kevjumba Aug 11, 2022
4062031
Run build-sphinx
kevjumba Aug 11, 2022
cb39329
Add tutorials
kevjumba Aug 11, 2022
554ca1a
Fix
kevjumba Aug 11, 2022
4a969e7
Fix?
kevjumba Aug 11, 2022
116320a
Fix lint
kevjumba Aug 11, 2022
c0b16ef
Fix
kevjumba Aug 11, 2022
44d09d0
Fix lint
kevjumba Aug 12, 2022
b6f0a79
Begin configuring tests
adchia Aug 15, 2022
2b2ff40
Fix
kevjumba Aug 15, 2022
4616366
Working version
kevjumba Aug 16, 2022
c7d9852
Fix
kevjumba Aug 17, 2022
d2e290b
Fix
kevjumba Aug 17, 2022
a726a9a
Fix
kevjumba Aug 17, 2022
32992e3
Fix lint
kevjumba Aug 17, 2022
ebb934b
Fix lint
kevjumba Aug 17, 2022
e456acb
Fix
kevjumba Aug 17, 2022
45f479f
Fix lint
kevjumba Aug 17, 2022
4b8c4a2
Fix
kevjumba Aug 17, 2022
b1bf602
Fix
kevjumba Aug 17, 2022
4586f00
Fix azure
kevjumba Aug 17, 2022
3b88c0b
Fix
kevjumba Aug 17, 2022
9ae8ee3
Fix
kevjumba Aug 17, 2022
1b12e4a
Fix lint and address issues
kevjumba Aug 18, 2022
0ca5048
Fix integration tests
kevjumba Aug 18, 2022
883f314
Fix
kevjumba Aug 18, 2022
ccf8716
Fix lint and address issues
kevjumba Aug 18, 2022
f05288e
Fix
kevjumba Aug 18, 2022
ee30e73
Fix
kevjumba Aug 18, 2022
ab17db9
Fix
kevjumba Aug 18, 2022
be162f5
Revert
kevjumba Aug 18, 2022
f5aa476
Fix
kevjumba Aug 18, 2022
4423dfa
Fix
kevjumba Aug 18, 2022
5806507
Fix
kevjumba Aug 18, 2022
7a4d055
Fix lint
kevjumba Aug 19, 2022
78b74b1
Fix
kevjumba Aug 19, 2022
a9e8119
Fix lint
kevjumba Aug 19, 2022
1341e3e
Fix pyarrow
kevjumba Aug 19, 2022
3d42093
Fix lint
kevjumba Aug 19, 2022
1c591f0
add requirements files
adchia Aug 19, 2022
b4da607
fix name of docs
adchia Aug 19, 2022
c3a0423
fix offline store readme
adchia Aug 19, 2022
576b57e
fix offline store readme
adchia Aug 19, 2022
69940ac
fix
adchia Aug 19, 2022
516ff76
fix
adchia Aug 19, 2022
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Fix lint
Signed-off-by: Kevin Zhang <kzhang@tecton.ai>
  • Loading branch information
kevjumba committed Aug 19, 2022
commit ebb934bbc4cc752ca92d1d296c35fb95becfa0d6
3 changes: 1 addition & 2 deletions sdk/python/feast/infra/contrib/azure_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,14 +15,13 @@
from feast.infra.offline_stores.offline_store import RetrievalJob
from feast.infra.offline_stores.offline_utils import get_offline_store_from_config
from feast.infra.online_stores.helpers import get_online_store_from_config
from feast.infra.passthrough_provider import PassthroughProvider
from feast.infra.provider import Provider
from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto
from feast.protos.feast.types.Value_pb2 import Value as ValueProto
from feast.registry import BaseRegistry
from feast.repo_config import RepoConfig
from feast.saved_dataset import SavedDataset
from feast.infra.passthrough_provider import PassthroughProvider

from feast.utils import (
_convert_arrow_to_proto,
_get_column_names,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
from sqlalchemy.engine import Engine
from sqlalchemy.orm import sessionmaker

from feast import FileSource, errors
from feast import FileSource, entity, errors
from feast.data_source import DataSource
from feast.errors import InvalidEntityType
from feast.feature_logging import LoggingConfig, LoggingSource
Expand Down Expand Up @@ -176,19 +176,21 @@ def get_historical_features(
expected_join_keys = _get_join_keys(project, feature_views, registry)
assert isinstance(config.offline_store, MsSqlServerOfflineStoreConfig)
engine = make_engine(config.offline_store)
entity_df["event_timestamp"] = pandas.to_datetime(
entity_df["event_timestamp"], utc=True
).fillna(pandas.Timestamp.now())
if isinstance(entity_df, pandas.DataFrame):
entity_df_event_timestamp_col = (
offline_utils.infer_event_timestamp_from_entity_df(dict(zip(list(entity_df.columns), list(entity_df.dtypes))))
)
entity_df[entity_df_event_timestamp_col] = pandas.to_datetime(
entity_df[entity_df_event_timestamp_col], utc=True
).fillna(pandas.Timestamp.now())

# TODO: figure out how to deal with entity dataframes that are strings
(
table_schema,
table_name,
) = _upload_entity_df_into_sqlserver_and_get_entity_schema(
engine, config, entity_df, full_feature_names=full_feature_names
)
entity_df_event_timestamp_col = (
offline_utils.infer_event_timestamp_from_entity_df(table_schema)
)

_assert_expected_columns_in_sqlserver(
expected_join_keys,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,12 @@

from feast.data_source import DataSource
from feast.infra.offline_stores.contrib.mssql_offline_store.mssql import (
MsSqlServerOfflineStoreConfig,
MsSqlServerOfflineStoreConfig, _df_to_create_table_sql
)
from feast.infra.offline_stores.contrib.mssql_offline_store.mssqlserver_source import (
MsSqlServerSource,
)
from feast.saved_dataset import SavedDatasetStorage
from feast.type_map import pa_to_mssql_type
from tests.integration.feature_repos.universal.data_source_creator import (
DataSourceCreator,
)
Expand All @@ -43,17 +42,6 @@ def mssql_container():
container.stop()


def _df_to_create_table_sql(df: pd.DataFrame, table_name: str) -> str:
    """Render a ``CREATE TABLE`` statement whose columns mirror *df*'s schema.

    The frame is converted to a pyarrow ``Table`` to obtain a typed schema;
    each arrow field is mapped to its MSSQL column type via
    ``pa_to_mssql_type``. Column and table names are wrapped in double
    quotes (requires ``QUOTED_IDENTIFIER ON`` on the MSSQL session —
    presumably the default for the engines used here; verify against caller).

    Args:
        df: Dataframe whose column names/dtypes define the table schema.
        table_name: Name of the table to create (used verbatim, quoted).

    Returns:
        A SQL ``CREATE TABLE`` statement as a string.
    """
    pa_table = pa.Table.from_pandas(df)

    # One '"name" TYPE' fragment per arrow schema field.
    columns = [f""""{f.name}" {pa_to_mssql_type(f.type)}""" for f in pa_table.schema]
    return f"""
    CREATE TABLE "{table_name}" (
        {", ".join(columns)}
    );
    """


class MsSqlDataSourceCreator(DataSourceCreator):
tables: List[str] = []

Expand Down Expand Up @@ -106,7 +94,6 @@ def create_data_source(
# Create table

destination_name = self.get_prefixed_table_name(destination_name)
# _df_to_create_table_sql(df, destination_name)
engine.execute(_df_to_create_table_sql(df, destination_name))
# Upload dataframe to azure table
df.to_sql(destination_name, engine, index=False, if_exists="append")
Expand Down