|
| 1 | +import io |
| 2 | +import json |
| 3 | +from typing import TYPE_CHECKING |
| 4 | +from urllib.parse import urlparse |
| 5 | + |
| 6 | +import pandas as pd |
| 7 | + |
| 8 | +from feast.constants import ConfigOptions |
| 9 | +from feast.contrib.validation.base import serialize_udf |
| 10 | +from feast.staging.storage_client import get_staging_client |
| 11 | + |
| 12 | +try: |
| 13 | + from great_expectations.core import ExpectationSuite |
| 14 | + from great_expectations.dataset import PandasDataset |
| 15 | +except ImportError: |
| 16 | + raise ImportError( |
| 17 | + "great_expectations must be installed to enable validation functionality. " |
| 18 | + "Please install feast[validation]" |
| 19 | + ) |
| 20 | + |
| 21 | +try: |
| 22 | + from pyspark.sql.types import BooleanType |
| 23 | +except ImportError: |
| 24 | + raise ImportError( |
| 25 | + "pyspark must be installed to enable validation functionality. " |
| 26 | + "Please install feast[validation]" |
| 27 | + ) |
| 28 | + |
| 29 | + |
| 30 | +if TYPE_CHECKING: |
| 31 | + from feast import Client, FeatureTable |
| 32 | + |
| 33 | + |
| 34 | +GE_PACKED_ARCHIVE = "https://storage.googleapis.com/feast-jobs/spark/validation/pylibs-ge-%(platform)s.tar.gz" |
| 35 | +_UNSET = object() |
| 36 | + |
| 37 | + |
class ValidationUDF:
    """
    Container pairing a validation UDF's name with its serialized code.

    Instances are produced by ``create_validation_udf`` and consumed by
    ``apply_validation``, which stages ``pickled_code`` for a Spark job.
    """

    def __init__(self, name: str, pickled_code: bytes):
        # Name under which the UDF is registered and staged (used in the
        # remote "<name>.pickle" path).
        self.name = name
        # Serialized function body as produced by serialize_udf.
        self.pickled_code = pickled_code

    def __repr__(self) -> str:
        # pickled_code is elided: it is opaque bytes and may be large.
        return f"{type(self).__name__}(name={self.name!r})"
| 42 | + |
| 43 | + |
def create_validation_udf(name: str, expectations: ExpectationSuite) -> ValidationUDF:
    """
    Wrap a great_expectations ExpectationSuite into a serialized Spark UDF.

    Expectations should be generated & validated using a training dataset:
    >>> from great_expectations.dataset import PandasDataset
    >>> ds = PandasDataset.from_dataset(your_training_df)
    >>> ds.expect_column_values_to_be_between('column', 0, 100)

    >>> expectations = ds.get_expectation_suite()

    Important: your expectations should pass on the training dataset — only
    successful checks are converted and stored in the ExpectationSuite.

    Now you can create a UDF that will validate data during ingestion:
    >>> create_validation_udf("myValidation", expectations)

    :param name: name for the resulting UDF
    :param expectations: collection of expectations gathered on a training dataset
    :return: ValidationUDF with serialized code
    """

    def udf(df: pd.DataFrame) -> pd.Series:
        # Run the whole suite against the incoming micro-batch.
        dataset = PandasDataset.from_dataset(df)
        report = dataset.validate(expectations, result_format="COMPLETE")

        # Start optimistic: every row valid until a failed check flags it.
        row_is_valid = pd.Series([True] * df.shape[0])

        failed_checks = (check for check in report.results if not check.success)
        for check in failed_checks:
            if check.exception_info["raised_exception"]:
                # ToDo: probably we should mark all rows as invalid
                continue

            # COMPLETE result_format supplies positional indices of the
            # offending rows, hence .iloc rather than label-based .loc.
            row_is_valid.iloc[check.result["unexpected_index_list"]] = False

        return row_is_valid

    pickled_code = serialize_udf(udf, BooleanType())
    return ValidationUDF(name, pickled_code)
| 85 | + |
| 86 | + |
def apply_validation(
    client: "Client",
    feature_table: "FeatureTable",
    udf: ValidationUDF,
    validation_window_secs: int,
    include_py_libs=_UNSET,
):
    """
    Upload the validation UDF code to the staging location and store the path
    to the UDF code and required python libraries as FeatureTable labels.

    :param client: feast client used to read staging config and re-apply the table
    :param feature_table: table whose labels receive the validation metadata
    :param udf: serialized validation UDF to stage
    :param validation_window_secs: streaming trigger interval, stored as a label
    :param include_py_libs: archive of python libs to ship with the job;
        defaults (via sentinel) to the prepackaged great_expectations archive
    """
    # _UNSET sentinel lets callers explicitly pass None/"" if they want to.
    if include_py_libs is _UNSET:
        include_py_libs = GE_PACKED_ARCHIVE

    staging_location = client._config.get(
        ConfigOptions.SPARK_STAGING_LOCATION
    ).rstrip("/")
    uploader = get_staging_client(urlparse(staging_location).scheme)

    remote_path = f"{staging_location}/udfs/{udf.name}.pickle"
    uploader.upload_fileobj(
        io.BytesIO(udf.pickled_code),
        f"{udf.name}.pickle",
        remote_uri=urlparse(remote_path),
    )

    validation_meta = {
        "name": udf.name,
        "pickled_code_path": remote_path,
        "include_archive_path": include_py_libs,
    }
    feature_table.labels.update(
        {
            "_validation": json.dumps(validation_meta),
            "_streaming_trigger_secs": str(validation_window_secs),
        }
    )
    client.apply_feature_table(feature_table)
0 commit comments