Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
51 commits
Select commit Hold shift + click to select a range
e1e210d
Broken state
kevjumba Aug 5, 2022
011d1e0
working state
kevjumba Aug 10, 2022
a6a2fce
Fix the lint issues
kevjumba Aug 10, 2022
57b63bb
Semi working state
kevjumba Aug 10, 2022
ae7ed8a
Fix
kevjumba Aug 10, 2022
421645b
Remove print
kevjumba Aug 10, 2022
07fece5
Fix lint
kevjumba Aug 11, 2022
4062031
Run build-sphinx
kevjumba Aug 11, 2022
cb39329
Add tutorials
kevjumba Aug 11, 2022
554ca1a
Fix
kevjumba Aug 11, 2022
4a969e7
Fix?
kevjumba Aug 11, 2022
116320a
Fix lint
kevjumba Aug 11, 2022
c0b16ef
Fix
kevjumba Aug 11, 2022
44d09d0
Fix lint
kevjumba Aug 12, 2022
b6f0a79
Begin configuring tests
adchia Aug 15, 2022
2b2ff40
Fix
kevjumba Aug 15, 2022
4616366
Working version
kevjumba Aug 16, 2022
c7d9852
Fix
kevjumba Aug 17, 2022
d2e290b
Fix
kevjumba Aug 17, 2022
a726a9a
Fix
kevjumba Aug 17, 2022
32992e3
Fix lint
kevjumba Aug 17, 2022
ebb934b
Fix lint
kevjumba Aug 17, 2022
e456acb
Fix
kevjumba Aug 17, 2022
45f479f
Fix lint
kevjumba Aug 17, 2022
4b8c4a2
Fix
kevjumba Aug 17, 2022
b1bf602
Fix
kevjumba Aug 17, 2022
4586f00
Fix azure
kevjumba Aug 17, 2022
3b88c0b
Fix
kevjumba Aug 17, 2022
9ae8ee3
Fix
kevjumba Aug 17, 2022
1b12e4a
Fix lint and address issues
kevjumba Aug 18, 2022
0ca5048
Fix integration tests
kevjumba Aug 18, 2022
883f314
Fix
kevjumba Aug 18, 2022
ccf8716
Fix lint and address issues
kevjumba Aug 18, 2022
f05288e
Fix
kevjumba Aug 18, 2022
ee30e73
Fix
kevjumba Aug 18, 2022
ab17db9
Fix
kevjumba Aug 18, 2022
be162f5
Revert
kevjumba Aug 18, 2022
f5aa476
Fix
kevjumba Aug 18, 2022
4423dfa
Fix
kevjumba Aug 18, 2022
5806507
Fix
kevjumba Aug 18, 2022
7a4d055
Fix lint
kevjumba Aug 19, 2022
78b74b1
Fix
kevjumba Aug 19, 2022
a9e8119
Fix lint
kevjumba Aug 19, 2022
1341e3e
Fix pyarrow
kevjumba Aug 19, 2022
3d42093
Fix lint
kevjumba Aug 19, 2022
1c591f0
add requirements files
adchia Aug 19, 2022
b4da607
fix name of docs
adchia Aug 19, 2022
c3a0423
fix offline store readme
adchia Aug 19, 2022
576b57e
fix offline store readme
adchia Aug 19, 2022
69940ac
fix
adchia Aug 19, 2022
516ff76
fix
adchia Aug 19, 2022
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Fix the lint issues
Signed-off-by: Kevin Zhang <kzhang@tecton.ai>
  • Loading branch information
kevjumba committed Aug 19, 2022
commit a6a2fce9a5895074ebd36ae8aacb3fb73b05e447
4 changes: 3 additions & 1 deletion sdk/python/feast/inference.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,9 @@
from feast.feature_view import DUMMY_ENTITY_ID, DUMMY_ENTITY_NAME, FeatureView
from feast.field import Field, from_value_type
from feast.infra.offline_stores.bigquery_source import BigQuerySource
from feast.infra.offline_stores.contrib.mssql_offline_store.mssqlserver_source import MsSqlServerSource
from feast.infra.offline_stores.contrib.mssql_offline_store.mssqlserver_source import (
MsSqlServerSource,
)
from feast.infra.offline_stores.file_source import FileSource
from feast.infra.offline_stores.redshift_source import RedshiftSource
from feast.infra.offline_stores.snowflake_source import SnowflakeSource
Expand Down
57 changes: 34 additions & 23 deletions sdk/python/feast/infra/contrib/azure_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,19 +15,23 @@
from feast.infra.offline_stores.offline_store import RetrievalJob
from feast.infra.offline_stores.offline_utils import get_offline_store_from_config
from feast.infra.online_stores.helpers import get_online_store_from_config
from feast.infra.provider import (
Provider,
)
from feast.infra.provider import Provider
from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto
from feast.protos.feast.types.Value_pb2 import Value as ValueProto
from feast.registry import Registry
from feast.registry import BaseRegistry
from feast.repo_config import RepoConfig
from feast.saved_dataset import SavedDataset
from feast.usage import RatioSampler, log_exceptions_and_usage, set_usage_attribute
from feast.utils import make_tzaware, _convert_arrow_to_proto, _get_column_names, _run_pyarrow_field_mapping
from feast.utils import (
_convert_arrow_to_proto,
_get_column_names,
_run_pyarrow_field_mapping,
make_tzaware,
)

DEFAULT_BATCH_SIZE = 10_000


class AzureProvider(Provider):
def __init__(self, config: RepoConfig):
self.repo_config = config
Expand All @@ -38,7 +42,7 @@ def __init__(self, config: RepoConfig):
else None
)

#@log_exceptions_and_usage(registry="az")
# @log_exceptions_and_usage(registry="az")
def update_infra(
self,
project: str,
Expand All @@ -59,7 +63,7 @@ def update_infra(
partial=partial,
)

#@log_exceptions_and_usage(registry="az")
# @log_exceptions_and_usage(registry="az")
def teardown_infra(
self,
project: str,
Expand All @@ -69,7 +73,7 @@ def teardown_infra(
if self.online_store:
self.online_store.teardown(self.repo_config, tables, entities)

#@log_exceptions_and_usage(registry="az")
# @log_exceptions_and_usage(registry="az")
def online_write_batch(
self,
config: RepoConfig,
Expand All @@ -82,7 +86,7 @@ def online_write_batch(
if self.online_store:
self.online_store.online_write_batch(config, table, data, progress)

#@log_exceptions_and_usage(sampler=RatioSampler(ratio=0.001), registry="az")
# @log_exceptions_and_usage(sampler=RatioSampler(ratio=0.001), registry="az")
def online_read(
self,
config: RepoConfig,
Expand All @@ -92,16 +96,23 @@ def online_read(
) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]:
result = []
if self.online_store:
result = self.online_store.online_read(config, table, entity_keys, requested_features)
result = self.online_store.online_read(
config, table, entity_keys, requested_features
)
return result

def ingest_df(
self, feature_view: FeatureView, entities: List[Entity], df: pandas.DataFrame,
self,
feature_view: FeatureView,
entities: List[Entity],
df: pandas.DataFrame,
):
table = pa.Table.from_pandas(df)

if feature_view.batch_source.field_mapping is not None:
table = _run_pyarrow_field_mapping(table, feature_view.batch_source.field_mapping)
table = _run_pyarrow_field_mapping(
table, feature_view.batch_source.field_mapping
)

join_keys = {entity.join_key: entity.value_type for entity in entities}
rows_to_write = _convert_arrow_to_proto(table, feature_view, join_keys)
Expand All @@ -116,7 +127,7 @@ def materialize_single_feature_view(
feature_view: FeatureView,
start_date: datetime,
end_date: datetime,
registry: Registry,
registry: BaseRegistry,
project: str,
tqdm_builder: Callable[[int], tqdm],
) -> None:
Expand All @@ -136,7 +147,7 @@ def materialize_single_feature_view(
data_source=feature_view.batch_source,
join_key_columns=join_key_columns,
feature_name_columns=feature_name_columns,
event_timestamp_column=event_timestamp_column,
timestamp_field=event_timestamp_column,
created_timestamp_column=created_timestamp_column,
start_date=start_date,
end_date=end_date,
Expand All @@ -145,7 +156,9 @@ def materialize_single_feature_view(
table = offline_job.to_arrow()

if feature_view.batch_source.field_mapping is not None:
table = _run_pyarrow_field_mapping(table, feature_view.batch_source.field_mapping)
table = _run_pyarrow_field_mapping(
table, feature_view.batch_source.field_mapping
)

join_keys = {entity.join_key: entity.value_type for entity in entities}

Expand All @@ -159,14 +172,14 @@ def materialize_single_feature_view(
lambda x: pbar.update(x),
)

#@log_exceptions_and_usage(registry="az")
# @log_exceptions_and_usage(registry="az")
def get_historical_features(
self,
config: RepoConfig,
feature_views: List[FeatureView],
feature_refs: List[str],
entity_df: Union[pandas.DataFrame, str],
registry: Registry,
registry: BaseRegistry,
project: str,
full_feature_names: bool,
) -> RetrievalJob:
Expand All @@ -182,9 +195,7 @@ def get_historical_features(
return job

def retrieve_saved_dataset(
self,
config: RepoConfig,
dataset: SavedDataset
self, config: RepoConfig, dataset: SavedDataset
) -> RetrievalJob:
feature_name_columns = [
ref.replace(":", "__") if dataset.full_feature_names else ref.split(":")[1]
Expand All @@ -209,7 +220,7 @@ def write_feature_service_logs(
feature_service: FeatureService,
logs: Union[pa.Table, str],
config: RepoConfig,
registry: Registry,
registry: BaseRegistry,
):
assert (
feature_service.logging_config is not None
Expand All @@ -229,7 +240,7 @@ def retrieve_feature_service_logs(
start_date: datetime,
end_date: datetime,
config: RepoConfig,
registry: Registry,
registry: BaseRegistry,
) -> RetrievalJob:
assert (
feature_service.logging_config is not None
Expand All @@ -249,4 +260,4 @@ def retrieve_feature_service_logs(
timestamp_field=ts_column,
start_date=make_tzaware(start_date),
end_date=make_tzaware(end_date),
)
)
Loading