-
Notifications
You must be signed in to change notification settings - Fork 1.3k
Expand file tree
/
Copy pathdynamodb.py
More file actions
748 lines (647 loc) · 26.7 KB
/
dynamodb.py
File metadata and controls
748 lines (647 loc) · 26.7 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
# Copyright 2021 The Feast Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import contextlib
import itertools
import logging
from collections import OrderedDict
from datetime import datetime
from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple, Union
from aiobotocore.config import AioConfig
from pydantic import StrictBool, StrictStr
from feast import Entity, FeatureView, utils
from feast.infra.infra_object import DYNAMODB_INFRA_OBJECT_CLASS_TYPE, InfraObject
from feast.infra.online_stores.helpers import compute_entity_id
from feast.infra.online_stores.online_store import OnlineStore
from feast.infra.supported_async_methods import SupportedAsyncMethods
from feast.infra.utils.aws_utils import dynamo_write_items_async
from feast.protos.feast.core.DynamoDBTable_pb2 import (
DynamoDBTable as DynamoDBTableProto,
)
from feast.protos.feast.core.InfraObject_pb2 import InfraObject as InfraObjectProto
from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto
from feast.protos.feast.types.Value_pb2 import Value as ValueProto
from feast.repo_config import FeastConfigBaseModel, RepoConfig
from feast.utils import get_user_agent
try:
import boto3
from aiobotocore import session
from boto3.dynamodb.types import TypeDeserializer
from botocore.config import Config
from botocore.exceptions import ClientError
except ImportError as e:
from feast.errors import FeastExtrasDependencyImportError
raise FeastExtrasDependencyImportError("aws", str(e))
logger = logging.getLogger(__name__)
class DynamoDBOnlineStoreConfig(FeastConfigBaseModel):
    """Online store config for DynamoDB store"""

    type: Literal["dynamodb"] = "dynamodb"
    """Online store type selector"""

    batch_size: int = 40
    """Number of items to retrieve in a DynamoDB BatchGetItem call."""

    endpoint_url: Union[str, None] = None
    """DynamoDB local development endpoint Url, i.e. http://localhost:8000"""

    region: StrictStr
    """AWS Region Name"""

    table_name_template: StrictStr = "{project}.{table_name}"
    """DynamoDB table name template"""

    consistent_reads: StrictBool = False
    """Whether to read from Dynamodb by forcing consistent reads"""

    tags: Union[Dict[str, str], None] = None
    """AWS resource tags added to each table"""

    session_based_auth: bool = False
    """AWS session based client authentication"""

    max_pool_connections: int = 10
    """Max number of connections for async Dynamodb operations"""
class DynamoDBOnlineStore(OnlineStore):
    """
    AWS DynamoDB implementation of the online store interface.

    Attributes:
        _dynamodb_client: Boto3 DynamoDB client.
        _dynamodb_resource: Boto3 DynamoDB resource.
    """

    # Lazily-created boto3 handles shared by the synchronous code paths;
    # built on first use by _get_dynamodb_client / _get_dynamodb_resource.
    _dynamodb_client = None
    _dynamodb_resource = None

    async def initialize(self, config: RepoConfig):
        """Eagerly create the module-level async DynamoDB client."""
        await _get_aiodynamodb_client(
            config.online_store.region, config.online_store.max_pool_connections
        )

    async def close(self):
        """Close the module-level async DynamoDB client."""
        await _aiodynamodb_close()

    @property
    def async_supported(self) -> SupportedAsyncMethods:
        # Async variants exist for both reads and writes
        # (online_read_async / online_write_batch_async below).
        return SupportedAsyncMethods(read=True, write=True)

    def update(
        self,
        config: RepoConfig,
        tables_to_delete: Sequence[FeatureView],
        tables_to_keep: Sequence[FeatureView],
        entities_to_delete: Sequence[Entity],
        entities_to_keep: Sequence[Entity],
        partial: bool,
    ):
        """
        Update tables from the DynamoDB Online Store.

        Args:
            config: The RepoConfig for the current FeatureStore.
            tables_to_delete: Tables to delete from the DynamoDB Online Store.
            tables_to_keep: Tables to keep in the DynamoDB Online Store.
        """
        online_config = config.online_store
        assert isinstance(online_config, DynamoDBOnlineStoreConfig)
        dynamodb_client = self._get_dynamodb_client(
            online_config.region,
            online_config.endpoint_url,
            online_config.session_based_auth,
        )
        dynamodb_resource = self._get_dynamodb_resource(
            online_config.region,
            online_config.endpoint_url,
            online_config.session_based_auth,
        )
        # Add Tags attribute to creation request only if configured to prevent
        # TagResource permission issues, even with an empty Tags array.
        kwargs = (
            {
                "Tags": [
                    {"Key": key, "Value": value}
                    for key, value in online_config.tags.items()
                ]
            }
            if online_config.tags
            else {}
        )
        for table_instance in tables_to_keep:
            try:
                # One on-demand table per feature view, keyed solely by the
                # string entity_id partition key.
                dynamodb_resource.create_table(
                    TableName=_get_table_name(online_config, config, table_instance),
                    KeySchema=[{"AttributeName": "entity_id", "KeyType": "HASH"}],
                    AttributeDefinitions=[
                        {"AttributeName": "entity_id", "AttributeType": "S"}
                    ],
                    BillingMode="PAY_PER_REQUEST",
                    **kwargs,
                )
            except ClientError as ce:
                # If the table creation fails with ResourceInUseException,
                # it means the table already exists or is being created.
                # Otherwise, re-raise the exception
                if ce.response["Error"]["Code"] != "ResourceInUseException":
                    raise
        # All create_table requests are issued before any waiting, so the
        # tables are provisioned concurrently on the AWS side.
        for table_instance in tables_to_keep:
            dynamodb_client.get_waiter("table_exists").wait(
                TableName=_get_table_name(online_config, config, table_instance)
            )
        for table_to_delete in tables_to_delete:
            _delete_table_idempotent(
                dynamodb_resource,
                _get_table_name(online_config, config, table_to_delete),
            )

    def teardown(
        self,
        config: RepoConfig,
        tables: Sequence[FeatureView],
        entities: Sequence[Entity],
    ):
        """
        Delete tables from the DynamoDB Online Store.

        Args:
            config: The RepoConfig for the current FeatureStore.
            tables: Tables to delete from the feature repo.
        """
        online_config = config.online_store
        assert isinstance(online_config, DynamoDBOnlineStoreConfig)
        dynamodb_resource = self._get_dynamodb_resource(
            online_config.region,
            online_config.endpoint_url,
            online_config.session_based_auth,
        )
        for table in tables:
            _delete_table_idempotent(
                dynamodb_resource, _get_table_name(online_config, config, table)
            )

    def online_write_batch(
        self,
        config: RepoConfig,
        table: FeatureView,
        data: List[
            Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]]
        ],
        progress: Optional[Callable[[int], Any]],
    ) -> None:
        """
        Write a batch of feature rows to online DynamoDB store.

        Note: This method applies a ``batch_writer`` to automatically handle any unprocessed items
        and resend them as needed, this is useful if you're loading a lot of data at a time.

        Args:
            config: The RepoConfig for the current FeatureStore.
            table: Feast FeatureView.
            data: a list of quadruplets containing Feature data. Each quadruplet contains an Entity Key,
                a dict containing feature values, an event timestamp for the row, and
                the created timestamp for the row if it exists.
            progress: Optional function to be called once every mini-batch of rows is written to
                the online store. Can be used to display progress.
        """
        online_config = config.online_store
        assert isinstance(online_config, DynamoDBOnlineStoreConfig)
        dynamodb_resource = self._get_dynamodb_resource(
            online_config.region,
            online_config.endpoint_url,
            online_config.session_based_auth,
        )
        table_instance = dynamodb_resource.Table(
            _get_table_name(online_config, config, table)
        )
        self._write_batch_non_duplicates(table_instance, data, progress, config)

    async def online_write_batch_async(
        self,
        config: RepoConfig,
        table: FeatureView,
        data: List[
            Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]]
        ],
        progress: Optional[Callable[[int], Any]],
    ) -> None:
        """
        Writes a batch of feature rows to the online store asynchronously.

        If a tz-naive timestamp is passed to this method, it is assumed to be UTC.

        Args:
            config: The config for the current feature store.
            table: Feature view to which these feature rows correspond.
            data: A list of quadruplets containing feature data. Each quadruplet contains an entity
                key, a dict containing feature values, an event timestamp for the row, and the created
                timestamp for the row if it exists.
            progress: Function to be called once a batch of rows is written to the online store, used
                to show progress.
        """
        online_config = config.online_store
        assert isinstance(online_config, DynamoDBOnlineStoreConfig)
        table_name = _get_table_name(online_config, config, table)
        # Deduplicate on entity key first so only the newest row per entity is
        # written (DynamoDB batch writes reject duplicate keys in one request).
        items = [
            _to_client_write_item(config, entity_key, features, timestamp)
            for entity_key, features, timestamp, _ in _latest_data_to_write(data)
        ]
        client = await _get_aiodynamodb_client(
            online_config.region, config.online_store.max_pool_connections
        )
        await dynamo_write_items_async(client, table_name, items)

    def online_read(
        self,
        config: RepoConfig,
        table: FeatureView,
        entity_keys: List[EntityKeyProto],
        requested_features: Optional[List[str]] = None,
    ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]:
        """
        Retrieve feature values from the online DynamoDB store.

        Args:
            config: The RepoConfig for the current FeatureStore.
            table: Feast FeatureView.
            entity_keys: a list of entity keys that should be read from the FeatureStore.
        """
        online_config = config.online_store
        assert isinstance(online_config, DynamoDBOnlineStoreConfig)
        dynamodb_resource = self._get_dynamodb_resource(
            online_config.region,
            online_config.endpoint_url,
            online_config.session_based_auth,
        )
        table_instance = dynamodb_resource.Table(
            _get_table_name(online_config, config, table)
        )
        # BatchGetItem is limited in request size, so entity ids are read in
        # chunks of online_config.batch_size.
        batch_size = online_config.batch_size
        entity_ids = self._to_entity_ids(config, entity_keys)
        entity_ids_iter = iter(entity_ids)
        result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = []
        while True:
            batch = list(itertools.islice(entity_ids_iter, batch_size))
            # No more items to insert
            if len(batch) == 0:
                break
            batch_entity_ids = self._to_resource_batch_get_payload(
                online_config, table_instance.name, batch
            )
            response = dynamodb_resource.batch_get_item(
                RequestItems=batch_entity_ids,
            )
            batch_result = self._process_batch_get_response(
                table_instance.name, response, entity_ids, batch
            )
            result.extend(batch_result)
        return result

    async def online_read_async(
        self,
        config: RepoConfig,
        table: FeatureView,
        entity_keys: List[EntityKeyProto],
        requested_features: Optional[List[str]] = None,
    ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]:
        """
        Reads features values for the given entity keys asynchronously.

        Args:
            config: The config for the current feature store.
            table: The feature view whose feature values should be read.
            entity_keys: The list of entity keys for which feature values should be read.
            requested_features: The list of features that should be read.

        Returns:
            A list of the same length as entity_keys. Each item in the list is a tuple where the first
            item is the event timestamp for the row, and the second item is a dict mapping feature names
            to values, which are returned in proto format.
        """
        online_config = config.online_store
        assert isinstance(online_config, DynamoDBOnlineStoreConfig)
        batch_size = online_config.batch_size
        entity_ids = self._to_entity_ids(config, entity_keys)
        entity_ids_iter = iter(entity_ids)
        table_name = _get_table_name(online_config, config, table)
        deserialize = TypeDeserializer().deserialize

        # The low-level client returns typed attribute values ({"S": ...});
        # convert each raw item into the plain-dict shape the shared
        # _process_batch_get_response helper expects.
        def to_tbl_resp(raw_client_response):
            return {
                "entity_id": deserialize(raw_client_response["entity_id"]),
                "event_ts": deserialize(raw_client_response["event_ts"]),
                "values": deserialize(raw_client_response["values"]),
            }

        batches = []
        entity_id_batches = []
        while True:
            batch = list(itertools.islice(entity_ids_iter, batch_size))
            if not batch:
                break
            entity_id_batch = self._to_client_batch_get_payload(
                online_config, table_name, batch
            )
            batches.append(batch)
            entity_id_batches.append(entity_id_batch)
        client = await _get_aiodynamodb_client(
            online_config.region, online_config.max_pool_connections
        )
        # Fire all BatchGetItem chunks concurrently; gather preserves the
        # input order, so response_batches lines up with batches.
        response_batches = await asyncio.gather(
            *[
                client.batch_get_item(
                    RequestItems=entity_id_batch,
                )
                for entity_id_batch in entity_id_batches
            ]
        )
        result_batches = []
        for batch, response in zip(batches, response_batches):
            result_batch = self._process_batch_get_response(
                table_name,
                response,
                entity_ids,
                batch,
                to_tbl_response=to_tbl_resp,
            )
            result_batches.append(result_batch)
        return list(itertools.chain(*result_batches))

    def _get_dynamodb_client(
        self,
        region: str,
        endpoint_url: Optional[str] = None,
        session_based_auth: Optional[bool] = False,
    ):
        """Return the cached boto3 client, creating it on first use.

        NOTE(review): the cache ignores later calls with different
        region/endpoint arguments — the first caller's settings win.
        """
        if self._dynamodb_client is None:
            self._dynamodb_client = _initialize_dynamodb_client(
                region, endpoint_url, session_based_auth
            )
        return self._dynamodb_client

    def _get_dynamodb_resource(
        self,
        region: str,
        endpoint_url: Optional[str] = None,
        session_based_auth: Optional[bool] = False,
    ):
        """Return the cached boto3 resource, creating it on first use.

        NOTE(review): like _get_dynamodb_client, the first caller's
        region/endpoint settings win for the lifetime of the cache.
        """
        if self._dynamodb_resource is None:
            self._dynamodb_resource = _initialize_dynamodb_resource(
                region, endpoint_url, session_based_auth
            )
        return self._dynamodb_resource

    def _sort_dynamodb_response(
        self,
        responses: list,
        order: list,
        to_tbl_response: Callable = lambda raw_dict: raw_dict,
    ) -> Any:
        """DynamoDB Batch Get Item doesn't return items in a particular order."""
        # Assign an index to order
        order_with_index = {value: idx for idx, value in enumerate(order)}
        # Sort table responses by index
        # (every response's entity_id is expected to appear in `order`)
        table_responses_ordered: Any = [
            (order_with_index[tbl_res["entity_id"]], tbl_res)
            for tbl_res in map(to_tbl_response, responses)
        ]
        table_responses_ordered = sorted(
            table_responses_ordered, key=lambda tup: tup[0]
        )
        _, table_responses_ordered = zip(*table_responses_ordered)
        return table_responses_ordered

    def _write_batch_non_duplicates(
        self,
        table_instance,
        data: List[
            Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]]
        ],
        progress: Optional[Callable[[int], Any]],
        config: RepoConfig,
    ):
        """Deduplicate write batch request items on ``entity_id`` primary key."""
        # batch_writer buffers puts, retries unprocessed items, and drops
        # earlier rows that share the same entity_id within one flush.
        with table_instance.batch_writer(overwrite_by_pkeys=["entity_id"]) as batch:
            for entity_key, features, timestamp, created_ts in data:
                batch.put_item(
                    Item=_to_resource_write_item(
                        config, entity_key, features, timestamp
                    )
                )
                if progress:
                    progress(1)

    def _process_batch_get_response(
        self, table_name, response, entity_ids, batch, **sort_kwargs
    ):
        """Convert one BatchGetItem response into (event_ts, features) tuples.

        Returns a list aligned 1:1 with `batch`: entities missing from the
        response yield (None, None) at their position.
        """
        response = response.get("Responses")
        table_responses = response.get(table_name)
        batch_result = []
        if table_responses:
            # Sort by position in the full entity_ids list; since `batch` is a
            # contiguous, order-preserving slice of entity_ids, the relative
            # order within the batch is preserved.
            table_responses = self._sort_dynamodb_response(
                table_responses, entity_ids, **sort_kwargs
            )
            entity_idx = 0
            for tbl_res in table_responses:
                entity_id = tbl_res["entity_id"]
                # Emit (None, None) for every requested entity that precedes
                # this response but got no item back.
                while entity_id != batch[entity_idx]:
                    batch_result.append((None, None))
                    entity_idx += 1
                res = {}
                for feature_name, value_bin in tbl_res["values"].items():
                    val = ValueProto()
                    val.ParseFromString(value_bin.value)
                    res[feature_name] = val
                batch_result.append((datetime.fromisoformat(tbl_res["event_ts"]), res))
                entity_idx += 1
        # Not all entities in a batch may have responses
        # Pad with remaining values in batch that were not found
        batch_size_nones = ((None, None),) * (len(batch) - len(batch_result))
        batch_result.extend(batch_size_nones)
        return batch_result

    @staticmethod
    def _to_entity_ids(config: RepoConfig, entity_keys: List[EntityKeyProto]):
        """Hash each entity-key proto into its string partition-key value."""
        return [
            compute_entity_id(
                entity_key,
                entity_key_serialization_version=config.entity_key_serialization_version,
            )
            for entity_key in entity_keys
        ]

    @staticmethod
    def _to_resource_batch_get_payload(online_config, table_name, batch):
        """Build a RequestItems payload for the boto3 *resource* API."""
        return {
            table_name: {
                "Keys": [{"entity_id": entity_id} for entity_id in batch],
                "ConsistentRead": online_config.consistent_reads,
            }
        }

    @staticmethod
    def _to_client_batch_get_payload(online_config, table_name, batch):
        """Build a RequestItems payload for the low-level *client* API
        (attribute values are explicitly typed, e.g. {"S": ...})."""
        return {
            table_name: {
                "Keys": [{"entity_id": {"S": entity_id}} for entity_id in batch],
                "ConsistentRead": online_config.consistent_reads,
            }
        }
# Process-wide aiobotocore session/client shared by all DynamoDBOnlineStore
# instances; managed by the helpers below.
_aioboto_session = None
_aioboto_client = None


def _get_aioboto_session():
    """Return the process-wide aiobotocore session, creating it on first use."""
    global _aioboto_session
    if _aioboto_session is None:
        logger.debug("initializing the aiobotocore session")
        _aioboto_session = session.get_session()
    return _aioboto_session
async def _get_aiodynamodb_client(region: str, max_pool_connections: int):
    """Return the process-wide async DynamoDB client, creating it on first use.

    NOTE(review): the client is cached unconditionally, so the `region` and
    `max_pool_connections` of the *first* call win; later calls with different
    values still receive the original client.
    """
    global _aioboto_client
    if _aioboto_client is None:
        logger.debug("initializing the aiobotocore dynamodb client")
        client_context = _get_aioboto_session().create_client(
            "dynamodb",
            region_name=region,
            config=AioConfig(max_pool_connections=max_pool_connections),
        )
        # The exit stack is not retained: cleanup is performed by
        # _aiodynamodb_close() calling close() on the client directly, so the
        # stack's __aexit__ is never invoked.
        context_stack = contextlib.AsyncExitStack()
        _aioboto_client = await context_stack.enter_async_context(client_context)
    return _aioboto_client
async def _aiodynamodb_close():
    """Close the shared async DynamoDB client, if one was created.

    Also resets the module-level cache to None so that a subsequent
    _get_aiodynamodb_client() call builds a fresh client instead of
    returning the already-closed one.
    """
    global _aioboto_client
    if _aioboto_client:
        await _aioboto_client.close()
        _aioboto_client = None
def _initialize_dynamodb_client(
    region: str,
    endpoint_url: Optional[str] = None,
    session_based_auth: Optional[bool] = False,
):
    """Create a boto3 DynamoDB client, optionally through a boto3 Session.

    The Feast user agent is attached so requests are identifiable server-side.
    """
    client_kwargs = {
        "region_name": region,
        "endpoint_url": endpoint_url,
        "config": Config(user_agent=get_user_agent()),
    }
    if session_based_auth:
        # Session-based auth resolves credentials via a fresh boto3 Session.
        return boto3.Session().client("dynamodb", **client_kwargs)
    return boto3.client("dynamodb", **client_kwargs)
def _initialize_dynamodb_resource(
    region: str,
    endpoint_url: Optional[str] = None,
    session_based_auth: Optional[bool] = False,
):
    """Create a boto3 DynamoDB resource, optionally through a boto3 Session.

    Attaches the Feast user agent via botocore Config for parity with
    _initialize_dynamodb_client, which already tags its requests; previously
    the resource path omitted it.
    """
    resource_kwargs = {
        "region_name": region,
        "endpoint_url": endpoint_url,
        "config": Config(user_agent=get_user_agent()),
    }
    if session_based_auth:
        return boto3.Session().resource("dynamodb", **resource_kwargs)
    return boto3.resource("dynamodb", **resource_kwargs)
# TODO(achals): This form of user-facing templating is experimental.
# Please refer to https://github.com/feast-dev/feast/issues/2438 before building on top of it,
def _get_table_name(
online_config: DynamoDBOnlineStoreConfig, config: RepoConfig, table: FeatureView
) -> str:
return online_config.table_name_template.format(
project=config.project, table_name=table.name
)
def _delete_table_idempotent(
    dynamodb_resource,
    table_name: str,
):
    """Delete *table_name*, treating an already-missing table as success."""
    try:
        dynamodb_resource.Table(table_name).delete()
        logger.info(f"Dynamo table {table_name} was deleted")
    except ClientError as ce:
        # ResourceNotFoundException means the table was already gone — that is
        # the idempotent success case. Anything else is a genuine failure.
        if ce.response["Error"]["Code"] != "ResourceNotFoundException":
            raise
        else:
            logger.warning(f"Trying to delete table that doesn't exist: {table_name}")
class DynamoDBTable(InfraObject):
    """
    A DynamoDB table managed by Feast.

    Attributes:
        name: The name of the table.
        region: The region of the table.
        endpoint_url: Local DynamoDB Endpoint Url.
        _dynamodb_client: Boto3 DynamoDB client.
        _dynamodb_resource: Boto3 DynamoDB resource.
    """

    region: str
    # endpoint_url is only set for local-development DynamoDB endpoints.
    endpoint_url = None
    # Lazily-created boto3 handles (see _get_dynamodb_client/_get_dynamodb_resource).
    _dynamodb_client = None
    _dynamodb_resource = None

    def __init__(self, name: str, region: str, endpoint_url: Optional[str] = None):
        super().__init__(name)
        self.region = region
        self.endpoint_url = endpoint_url

    def to_infra_object_proto(self) -> InfraObjectProto:
        """Wrap this table's proto in a generic InfraObject proto."""
        dynamodb_table_proto = self.to_proto()
        return InfraObjectProto(
            infra_object_class_type=DYNAMODB_INFRA_OBJECT_CLASS_TYPE,
            dynamodb_table=dynamodb_table_proto,
        )

    def to_proto(self) -> Any:
        """Serialize name and region (endpoint_url is not persisted)."""
        dynamodb_table_proto = DynamoDBTableProto()
        dynamodb_table_proto.name = self.name
        dynamodb_table_proto.region = self.region
        return dynamodb_table_proto

    @staticmethod
    def from_infra_object_proto(infra_object_proto: InfraObjectProto) -> Any:
        # NOTE: endpoint_url is not round-tripped through the proto, so a
        # deserialized table always targets the real AWS endpoint.
        return DynamoDBTable(
            name=infra_object_proto.dynamodb_table.name,
            region=infra_object_proto.dynamodb_table.region,
        )

    @staticmethod
    def from_proto(dynamodb_table_proto: DynamoDBTableProto) -> Any:
        return DynamoDBTable(
            name=dynamodb_table_proto.name,
            region=dynamodb_table_proto.region,
        )

    def update(self):
        """Create the backing DynamoDB table (idempotently) and wait for it."""
        dynamodb_client = self._get_dynamodb_client(self.region, self.endpoint_url)
        dynamodb_resource = self._get_dynamodb_resource(self.region, self.endpoint_url)
        try:
            dynamodb_resource.create_table(
                TableName=f"{self.name}",
                KeySchema=[{"AttributeName": "entity_id", "KeyType": "HASH"}],
                AttributeDefinitions=[
                    {"AttributeName": "entity_id", "AttributeType": "S"}
                ],
                BillingMode="PAY_PER_REQUEST",
            )
        except ClientError as ce:
            # If the table creation fails with ResourceInUseException,
            # it means the table already exists or is being created.
            # Otherwise, re-raise the exception
            if ce.response["Error"]["Code"] != "ResourceInUseException":
                raise
        dynamodb_client.get_waiter("table_exists").wait(TableName=f"{self.name}")

    def teardown(self):
        """Delete the backing DynamoDB table; missing tables are ignored."""
        dynamodb_resource = self._get_dynamodb_resource(self.region, self.endpoint_url)
        _delete_table_idempotent(dynamodb_resource, self.name)

    def _get_dynamodb_client(self, region: str, endpoint_url: Optional[str] = None):
        """Return the cached boto3 client, creating it on first use."""
        if self._dynamodb_client is None:
            self._dynamodb_client = _initialize_dynamodb_client(region, endpoint_url)
        return self._dynamodb_client

    def _get_dynamodb_resource(self, region: str, endpoint_url: Optional[str] = None):
        """Return the cached boto3 resource, creating it on first use."""
        if self._dynamodb_resource is None:
            self._dynamodb_resource = _initialize_dynamodb_resource(
                region, endpoint_url
            )
        return self._dynamodb_resource
def _to_resource_write_item(config, entity_key, features, timestamp):
    """Build a boto3-resource PutItem payload for one feature row."""
    serialized_features = {
        feature_name: proto_value.SerializeToString()
        for feature_name, proto_value in features.items()
    }
    # Partition key: deterministic hash of the entity-key proto.
    entity_id = compute_entity_id(
        entity_key,
        entity_key_serialization_version=config.entity_key_serialization_version,
    )
    return {
        "entity_id": entity_id,
        "event_ts": str(utils.make_tzaware(timestamp)),
        "values": serialized_features,
    }
def _to_client_write_item(config, entity_key, features, timestamp):
    """Build a low-level-client write item (explicitly typed attribute values)."""
    # Partition key: deterministic hash of the entity-key proto.
    entity_id = compute_entity_id(
        entity_key,
        entity_key_serialization_version=config.entity_key_serialization_version,
    )
    feature_map = {}
    for feature_name, proto_value in features.items():
        feature_map[feature_name] = {"B": proto_value.SerializeToString()}
    return {
        "entity_id": {"S": entity_id},
        "event_ts": {"S": str(utils.make_tzaware(timestamp))},
        "values": {"M": feature_map},
    }
def _latest_data_to_write(
data: List[
Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]]
],
):
as_hashable = ((d[0].SerializeToString(), d) for d in data)
sorted_data = sorted(as_hashable, key=lambda ah: (ah[0], ah[1][2]))
return (v for _, v in OrderedDict((ah[0], ah[1]) for ah in sorted_data).items())