from datetime import datetime, timedelta
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union

import pandas
import pyarrow
import pyarrow as pa
from tqdm import tqdm

from feast import FeatureService
from feast.entity import Entity
from feast.feature_logging import FeatureServiceLoggingSource
from feast.feature_view import FeatureView
from feast.infra.offline_stores.offline_store import RetrievalJob
from feast.infra.offline_stores.offline_utils import get_offline_store_from_config
from feast.infra.online_stores.helpers import get_online_store_from_config
from feast.infra.provider import (
    Provider,
    _convert_arrow_to_proto,
    _get_column_names,
    _run_field_mapping,
)
from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto
from feast.protos.feast.types.Value_pb2 import Value as ValueProto
from feast.registry import Registry
from feast.repo_config import RepoConfig
from feast.saved_dataset import SavedDataset
from feast.usage import RatioSampler, log_exceptions_and_usage, set_usage_attribute
from feast.utils import make_tzaware

DEFAULT_BATCH_SIZE = 10_000


class PassthroughProvider(Provider):
    """
    The passthrough provider delegates all operations to the underlying online and offline stores.
    """

    def __init__(self, config: RepoConfig):
        super().__init__(config)
        self.repo_config = config
        self._offline_store = None
        self._online_store = None
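
    # Both stores are resolved lazily from the repo config. The online store
    # is optional (a repository may be batch-only), while the offline store is
    # always constructed on first access.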

    @property
    def online_store(self):
        if not self._online_store and self.repo_config.online_store:
            self._online_store = get_online_store_from_config(
                self.repo_config.online_store
            )
        return self._online_store

    @property
    def offline_store(self):
        if not self._offline_store:
            self._offline_store = get_offline_store_from_config(
                self.repo_config.offline_store
            )
        return self._offline_store

    def update_infra(
        self,
        project: str,
        tables_to_delete: Sequence[FeatureView],
        tables_to_keep: Sequence[FeatureView],
        entities_to_delete: Sequence[Entity],
        entities_to_keep: Sequence[Entity],
        partial: bool,
    ):
        set_usage_attribute("provider", self.__class__.__name__)

        # Call update only if there is an online store
        if self.online_store:
            self.online_store.update(
                config=self.repo_config,
                tables_to_delete=tables_to_delete,
                tables_to_keep=tables_to_keep,
                entities_to_keep=entities_to_keep,
                entities_to_delete=entities_to_delete,
                partial=partial,
            )

    def teardown_infra(
        self, project: str, tables: Sequence[FeatureView], entities: Sequence[Entity],
    ) -> None:
        set_usage_attribute("provider", self.__class__.__name__)
        if self.online_store:
            self.online_store.teardown(self.repo_config, tables, entities)

    def online_write_batch(
        self,
        config: RepoConfig,
        table: FeatureView,
        data: List[
            Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]]
        ],
        progress: Optional[Callable[[int], Any]],
    ) -> None:
        set_usage_attribute("provider", self.__class__.__name__)
        if self.online_store:
            self.online_store.online_write_batch(config, table, data, progress)
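
    # online_read sits on the hot serving path, so its usage logging is
    # sampled: RatioSampler(ratio=0.001) reports roughly 1 in 1,000 calls.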
    @log_exceptions_and_usage(sampler=RatioSampler(ratio=0.001))
    def online_read(
        self,
        config: RepoConfig,
        table: FeatureView,
        entity_keys: List[EntityKeyProto],
        requested_features: Optional[List[str]] = None,
    ) -> List:
        set_usage_attribute("provider", self.__class__.__name__)
        result = []
        if self.online_store:
            result = self.online_store.online_read(
                config, table, entity_keys, requested_features
            )
        return result

    def ingest_df(
        self, feature_view: FeatureView, entities: List[Entity], df: pandas.DataFrame,
    ):
        set_usage_attribute("provider", self.__class__.__name__)
        table = pa.Table.from_pandas(df)

        if feature_view.batch_source.field_mapping is not None:
            table = _run_field_mapping(table, feature_view.batch_source.field_mapping)

        join_keys = {entity.join_key: entity.value_type for entity in entities}
        rows_to_write = _convert_arrow_to_proto(table, feature_view, join_keys)

        self.online_write_batch(
            self.repo_config, feature_view, rows_to_write, progress=None
        )
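
    # Illustrative call (hypothetical names, not part of this module): given a
    # feature view ``driver_stats_fv`` keyed on a ``driver`` entity,
    #
    #   df = pandas.DataFrame(
    #       {
    #           "driver_id": [1001],
    #           "conv_rate": [0.85],
    #           "event_timestamp": [datetime.utcnow()],
    #       }
    #   )
    #   provider.ingest_df(driver_stats_fv, [driver], df)
    #
    # converts the frame to Arrow, applies the batch source's field mapping if
    # one is set, and writes the resulting rows straight to the online store.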

    def materialize_single_feature_view(
        self,
        config: RepoConfig,
        feature_view: FeatureView,
        start_date: datetime,
        end_date: datetime,
        registry: Registry,
        project: str,
        tqdm_builder: Callable[[int], tqdm],
    ) -> None:
        set_usage_attribute("provider", self.__class__.__name__)

        # Look up this view's entities so the join key, feature, and timestamp
        # columns can be derived.
        entities = []
        for entity_name in feature_view.entities:
            entities.append(registry.get_entity(entity_name, project))

        (
            join_key_columns,
            feature_name_columns,
            timestamp_field,
            created_timestamp_column,
        ) = _get_column_names(feature_view, entities)

        # Pull the latest feature values per entity key within
        # [start_date, end_date] from the offline store.
        offline_job = self.offline_store.pull_latest_from_table_or_query(
            config=config,
            data_source=feature_view.batch_source,
            join_key_columns=join_key_columns,
            feature_name_columns=feature_name_columns,
            timestamp_field=timestamp_field,
            created_timestamp_column=created_timestamp_column,
            start_date=start_date,
            end_date=end_date,
        )

        table = offline_job.to_arrow()

        if feature_view.batch_source.field_mapping is not None:
            table = _run_field_mapping(table, feature_view.batch_source.field_mapping)

        join_key_to_value_type = {
            entity.name: entity.dtype.to_value_type()
            for entity in feature_view.entity_columns
        }

        # Convert to protos and write to the online store in batches,
        # reporting progress through the caller-supplied tqdm builder.
        with tqdm_builder(table.num_rows) as pbar:
            for batch in table.to_batches(DEFAULT_BATCH_SIZE):
                rows_to_write = _convert_arrow_to_proto(
                    batch, feature_view, join_key_to_value_type
                )
                self.online_write_batch(
                    self.repo_config,
                    feature_view,
                    rows_to_write,
                    lambda x: pbar.update(x),
                )
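
    # A caller-supplied ``tqdm_builder`` for the method above could look like
    # the sketch below; the exact keyword arguments are an assumption, not
    # Feast's own builder.
    #
    #   def tqdm_builder(length: int) -> tqdm:
    #       return tqdm(total=length, ncols=100)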

    def get_historical_features(
        self,
        config: RepoConfig,
        feature_views: List[FeatureView],
        feature_refs: List[str],
        entity_df: Union[pandas.DataFrame, str],
        registry: Registry,
        project: str,
        full_feature_names: bool,
    ) -> RetrievalJob:
        set_usage_attribute("provider", self.__class__.__name__)
        job = self.offline_store.get_historical_features(
            config=config,
            feature_views=feature_views,
            feature_refs=feature_refs,
            entity_df=entity_df,
            registry=registry,
            project=project,
            full_feature_names=full_feature_names,
        )
        return job
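
    # ``entity_df`` may be either a pandas DataFrame or a SQL query string,
    # depending on what the configured offline store supports. The returned
    # RetrievalJob is lazy: no query runs until a materializing method such as
    # to_df() or to_arrow() is called on it.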

    def retrieve_saved_dataset(
        self, config: RepoConfig, dataset: SavedDataset
    ) -> RetrievalJob:
        set_usage_attribute("provider", self.__class__.__name__)

        # A feature ref has the form "<view>:<feature>"; the saved dataset's
        # columns are either fully qualified ("<view>__<feature>") or bare
        # feature names, depending on full_feature_names.
        feature_name_columns = [
            ref.replace(":", "__") if dataset.full_feature_names else ref.split(":")[1]
            for ref in dataset.features
        ]

        # TODO: replace hardcoded value
        event_ts_column = "event_timestamp"

        return self.offline_store.pull_all_from_table_or_query(
            config=config,
            data_source=dataset.storage.to_data_source(),
            join_key_columns=dataset.join_keys,
            feature_name_columns=feature_name_columns,
            timestamp_field=event_ts_column,
            start_date=make_tzaware(dataset.min_event_timestamp),  # type: ignore
            end_date=make_tzaware(dataset.max_event_timestamp + timedelta(seconds=1)),  # type: ignore
        )

    def write_feature_service_logs(
        self,
        feature_service: FeatureService,
        logs: Union[pyarrow.Table, str],
        config: RepoConfig,
        registry: Registry,
    ):
        assert (
            feature_service.logging_config is not None
        ), "Logging should be configured for the feature service before calling this function"

        self.offline_store.write_logged_features(
            config=config,
            data=logs,
            source=FeatureServiceLoggingSource(feature_service, config.project),
            logging_config=feature_service.logging_config,
            registry=registry,
        )

    def retrieve_feature_service_logs(
        self,
        feature_service: FeatureService,
        start_date: datetime,
        end_date: datetime,
        config: RepoConfig,
        registry: Registry,
    ) -> RetrievalJob:
        assert (
            feature_service.logging_config is not None
        ), "Logging should be configured for the feature service before calling this function"

        logging_source = FeatureServiceLoggingSource(feature_service, config.project)
        schema = logging_source.get_schema(registry)
        logging_config = feature_service.logging_config
        ts_column = logging_source.get_log_timestamp_column()

        # Every logged column except the log timestamp is treated as a feature
        # column; the log timestamp becomes the retrieval timestamp field.
        columns = list(set(schema.names) - {ts_column})

        return self.offline_store.pull_all_from_table_or_query(
            config=config,
            data_source=logging_config.destination.to_data_source(),
            join_key_columns=[],
            feature_name_columns=columns,
            timestamp_field=ts_column,
            start_date=make_tzaware(start_date),
            end_date=make_tzaware(end_date),
        )
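
# ---------------------------------------------------------------------------
# Usage sketch. Feast normally constructs this provider for you when
# feature_store.yaml selects a store-backed provider; the direct construction
# below is illustration only, and the RepoConfig fields shown are assumptions
# rather than a prescribed configuration.
#
#   from feast.repo_config import RepoConfig
#
#   config = RepoConfig(
#       project="demo",
#       registry="data/registry.db",
#       provider="local",
#       online_store={"type": "sqlite", "path": "data/online_store.db"},
#       offline_store={"type": "file"},
#   )
#   provider = PassthroughProvider(config)
#   provider.online_store   # lazily built from config.online_store
#   provider.offline_store  # lazily built from config.offline_store
# ---------------------------------------------------------------------------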