diff --git a/.gitbook.yaml b/.gitbook.yaml new file mode 100644 index 00000000000..48d28fbe8e2 --- /dev/null +++ b/.gitbook.yaml @@ -0,0 +1,4 @@ +root: ./docs/ + +structure: + readme: README.md \ No newline at end of file diff --git a/.gitignore b/.gitignore index afee5aa8eeb..8aaa0b69200 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,9 @@ ### Scratch files ### scratch* +### Feast UI deprecated folder #### +ui/ + ### Local Environment ### *local*.env @@ -56,6 +59,7 @@ vendor __pycache__/ *.py[cod] *$py.class +*.prof # C extensions *.so @@ -63,6 +67,7 @@ __pycache__/ # Distribution / packaging .Python build/ +classes/ develop-eggs/ dist/ downloads/ diff --git a/.mvn/README.md b/.mvn/README.md new file mode 100644 index 00000000000..18ec0647585 --- /dev/null +++ b/.mvn/README.md @@ -0,0 +1,23 @@ +Project-specific Maven Configuration +==================================== + + + +maven.config +------------ + +Options to always give to the `mvn` CLI. This file doesn't support comments, +which is largely the reason this README exists. + +`--also-make` tells Maven to use [the Reactor] to recognize inter-module +dependencies and include them in the build whenever necessary. This is generally +desirable in a multi-module project. + +In addition to saving typing on the command line, using `--also-make` as a +default gets IntelliJ's main build & run functions working without failing to +resolve the inter-module deps or [running afoul of the Enforcer plugin][1], as +long as you use the ["Delegate IDE build/run actions to Maven"][2] setting. + +[the Reactor]: https://maven.apache.org/guides/mini/guide-multiple-modules.html +[1]: https://maven.apache.org/enforcer/enforcer-rules/reactorModuleConvergence.html +[2]: https://www.jetbrains.com/help/idea/delegate-build-and-run-actions-to-maven.html diff --git a/.mvn/maven.config b/.mvn/maven.config new file mode 100644 index 00000000000..e89445621b6 --- /dev/null +++ b/.mvn/maven.config @@ -0,0 +1 @@ +--also-make diff --git a/.prow/config.yaml b/.prow/config.yaml index fb0015e0916..c1305bf6ec2 100644 --- a/.prow/config.yaml +++ b/.prow/config.yaml @@ -21,7 +21,7 @@ plank: deck: tide_update_period: 1s spyglass: - size_limit: 100e+6 # 100MB + size_limit: 50e+6 # 50MB viewers: "started.json|finished.json": ["metadata"] "build-log.txt": ["buildlog"] @@ -50,132 +50,56 @@ tide: # presubmits list Prow jobs that run on pull requests presubmits: gojek/feast: - - name: unit-test-core + - name: test-core-and-ingestion decorate: true always_run: true spec: - volumes: - - name: service-account - secret: - secretName: prow-service-account containers: - image: maven:3.6-jdk-8 - volumeMounts: - - name: service-account - mountPath: /etc/service-account - readOnly: true - env: - - name: GOOGLE_APPLICATION_CREDENTIALS - value: /etc/service-account/service-account.json - command: [".prow/scripts/run_unit_test.sh", "--component", "core"] + command: [".prow/scripts/test-core-ingestion.sh"] - - name: unit-test-ingestion + - name: test-serving decorate: true always_run: true spec: - volumes: - - name: service-account - secret: - secretName: prow-service-account containers: - image: maven:3.6-jdk-8 - volumeMounts: - - name: service-account - mountPath: /etc/service-account - readOnly: true - env: - - name: GOOGLE_APPLICATION_CREDENTIALS - value: /etc/service-account/service-account.json - command: [".prow/scripts/run_unit_test.sh", "--component", "ingestion"] + command: [".prow/scripts/test-serving.sh"] - - name: unit-test-serving + - name: test-java-sdk decorate: true 
always_run: true spec: containers: - image: maven:3.6-jdk-8 - command: [".prow/scripts/run_unit_test.sh", "--component", "serving"] + command: [".prow/scripts/test-java-sdk.sh"] - - name: unit-test-cli + - name: test-python-sdk decorate: true always_run: true spec: containers: - - image: golang:1.12 - env: - - name: GO111MODULE - value: "on" - command: [".prow/scripts/run_unit_test.sh", "--component", "cli"] + - image: python:3.7 + command: [".prow/scripts/test-python-sdk.sh"] - - name: unit-test-python-sdk + - name: test-golang-sdk decorate: true always_run: true spec: - volumes: - - name: service-account - secret: - secretName: prow-service-account containers: - - image: python:3.6 - volumeMounts: - - name: service-account - mountPath: /etc/service-account - readOnly: true - env: - - name: GOOGLE_APPLICATION_CREDENTIALS - value: /etc/service-account/service-account.json - command: [".prow/scripts/run_unit_test.sh", "--component", "python-sdk"] + - image: golang:1.13 + command: [".prow/scripts/test-golang-sdk.sh"] - - name: integration-test + - name: test-end-to-end decorate: true always_run: true spec: - volumes: - - name: docker-socket-volume - hostPath: - path: /var/run/docker.sock - type: File - - name: service-account - secret: - secretName: prow-service-account - nodeSelector: - os: ubuntu containers: - - image: google/cloud-sdk - # securityContext and docker socket vol mounts are needed because we are building - # Docker images in this job - securityContext: - privileged: true - volumeMounts: - - name: docker-socket-volume - mountPath: /var/run/docker.sock - - name: service-account - mountPath: /etc/service-account - readOnly: true - command: - - bash - - -c - - | - export FEAST_HOME=${PWD} - export FEAST_IMAGE_REGISTRY=us.gcr.io - export FEAST_IMAGE_TAG=${PULL_PULL_SHA} - export FEAST_WAREHOUSE_DATASET=feast_build_${BUILD_ID} - export FEAST_CORE_URL=build-${BUILD_ID:0:5}.drone.feast.ai:80 - export FEAST_SERVING_URL=build-${BUILD_ID:0:5}.drone.feast.ai:80 - export FEAST_RELEASE_NAME=feast-${BUILD_ID:0:5} - export BATCH_IMPORT_DATA_GCS_PATH=gs://feast-templocation-kf-feast/build_${BUILD_ID:0:5}/integration-tests/testdata/feature_values/ingestion_1.csv - export KAFKA_BROKERS=10.128.0.201:9092 - export KAFKA_TOPICS=feast_build_${BUILD_ID:0:5} - - . .prow/scripts/prepare_integration_test.sh - .prow/scripts/install_feast_and_run_e2e_test.sh - TEST_EXIT_CODE=$? - .prow/scripts/cleanup_feast_installation.sh - - exit ${TEST_EXIT_CODE} + - image: maven:3.6-jdk-8 + command: [".prow/scripts/test-end-to-end.sh"] # TODO: do a release when a git tag is pushed -# postsubmits list Prow jobs that run on every push # +# postsubmits list Prow jobs that run on every push # postsubmits: -# gojek/feast: +# gojek/feast: \ No newline at end of file diff --git a/.prow/scripts/cleanup_feast_installation.sh b/.prow/scripts/cleanup_feast_installation.sh deleted file mode 100755 index 78b7d83a327..00000000000 --- a/.prow/scripts/cleanup_feast_installation.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env bash -set -e - -bq -q rm -rf --dataset ${FEAST_WAREHOUSE_DATASET} -gsutil -q rm ${BATCH_IMPORT_DATA_GCS_PATH} -helm delete --purge $FEAST_RELEASE_NAME diff --git a/.prow/scripts/prepare_maven_cache.sh b/.prow/scripts/download-maven-cache.sh similarity index 79% rename from .prow/scripts/prepare_maven_cache.sh rename to .prow/scripts/download-maven-cache.sh index e5113389426..c9704878e0a 100755 --- a/.prow/scripts/prepare_maven_cache.sh +++ b/.prow/scripts/download-maven-cache.sh @@ -23,5 +23,11 @@ done if [[ ! 
${ARCHIVE_URI} ]]; then usage; exit 1; fi if [[ ! ${OUTPUT_DIR} ]]; then usage; exit 1; fi +# Install Google Cloud SDK if gsutil command not exists +if [[ ! $(command -v gsutil) ]]; then + CURRENT_DIR=$(dirname "$BASH_SOURCE") + . "${CURRENT_DIR}"/install_google_cloud_sdk.sh +fi + gsutil -q cp ${ARCHIVE_URI} /tmp/.m2.tar tar xf /tmp/.m2.tar -C ${OUTPUT_DIR} diff --git a/.prow/scripts/install_feast_and_run_e2e_test.sh b/.prow/scripts/install_feast_and_run_e2e_test.sh deleted file mode 100755 index 029f2e3e31a..00000000000 --- a/.prow/scripts/install_feast_and_run_e2e_test.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/usr/bin/env bash - -set -e - -echo "============================================================" -echo "Installing Feast Release" -echo "============================================================" - -helm install --name ${FEAST_RELEASE_NAME} --wait --timeout 210 ${FEAST_HOME}/charts/feast -f integration-tests/feast-helm-values.yaml - -echo "============================================================" -echo "Testing Batch Import" -echo "============================================================" - -cd ${FEAST_HOME}/integration-tests/testdata - -feast apply entity entity_specs/entity_1.yaml -feast apply feature feature_specs/entity_1*.yaml -feast jobs run import_specs/batch_from_gcs.yaml --wait - -cd $FEAST_HOME/integration-tests - -python -m testutils.validate_feature_values \ - --entity_spec_file=testdata/entity_specs/entity_1.yaml \ - --feature_spec_files=testdata/feature_specs/entity_1*.yaml \ - --expected-warehouse-values-file=testdata/feature_values/ingestion_1.csv \ - --expected-serving-values-file=testdata/feature_values/serving_1.csv \ - --bigquery-dataset-for-warehouse=${FEAST_WAREHOUSE_DATASET} \ - --feast-serving-url=${FEAST_SERVING_URL} - -echo "============================================================" -echo "Testing Streaming Import" -echo "============================================================" - -cd $FEAST_HOME/integration-tests/testdata - -feast apply entity entity_specs/entity_2.yaml -feast apply feature feature_specs/entity_2*.yaml -feast jobs run import_specs/stream_from_kafka.yaml & - -IMPORT_JOB_PID=$! 
-sleep 20 - -cd $FEAST_HOME/integration-tests - -python -m testutils.kafka_producer \ - --bootstrap_servers=$KAFKA_BROKERS \ - --topic=$KAFKA_TOPICS \ - --entity_spec_file=testdata/entity_specs/entity_2.yaml \ - --feature_spec_files=testdata/feature_specs/entity_2*.yaml \ - --feature_values_file=testdata/feature_values/ingestion_2.csv -sleep 20 - -python -m testutils.validate_feature_values \ - --entity_spec_file=testdata/entity_specs/entity_2.yaml \ - --feature_spec_files=testdata/feature_specs/entity_2*.yaml \ - --expected-serving-values-file=testdata/feature_values/serving_2.csv \ - --feast-serving-url=$FEAST_SERVING_URL - -kill -9 ${IMPORT_JOB_PID} diff --git a/.prow/scripts/install_feast_sdk.sh b/.prow/scripts/install_feast_sdk.sh deleted file mode 100755 index 723199b9e51..00000000000 --- a/.prow/scripts/install_feast_sdk.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env bash -set -e - -# This script ensures latest Feast Python SDK and Feast CLI are installed - -pip install -qe ${FEAST_HOME}/sdk/python -pip install -qr ${FEAST_HOME}/integration-tests/testutils/requirements.txt -go build -o /usr/local/bin/feast ./cli/feast &> /dev/null -feast config set coreURI ${FEAST_CORE_URL} diff --git a/.prow/scripts/install_google_cloud_sdk.sh b/.prow/scripts/install_google_cloud_sdk.sh index 0865f6085c3..c6356557cec 100755 --- a/.prow/scripts/install_google_cloud_sdk.sh +++ b/.prow/scripts/install_google_cloud_sdk.sh @@ -23,14 +23,14 @@ while [ "$1" != "" ]; do shift done -GOOGLE_CLOUD_SDK_ARCHIVE_URL=https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-sdk-244.0.0-linux-x86_64.tar.gz +GOOGLE_CLOUD_SDK_ARCHIVE_URL=https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-sdk-266.0.0-linux-x86_64.tar.gz GOOGLE_PROJECT_ID=kf-feast KUBE_CLUSTER_NAME=primary-test-cluster KUBE_CLUSTER_ZONE=us-central1-a curl -s ${GOOGLE_CLOUD_SDK_ARCHIVE_URL} | tar xz -C / export PATH=/google-cloud-sdk/bin:${PATH} -gcloud -q components install kubectl +gcloud -q components install kubectl &> /var/log/kubectl.install.log if [[ ${KEY_FILE} ]]; then gcloud -q auth activate-service-account --key-file=${KEY_FILE} diff --git a/.prow/scripts/install_test_tools.sh b/.prow/scripts/install_test_tools.sh deleted file mode 100755 index a2e30bbc3a2..00000000000 --- a/.prow/scripts/install_test_tools.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env bash -set -e - -# This script installs the following Feast test utilities: -# ============================================================ -# - gettext package so we can use envsubst command to provide values to helm template file -# - Python 3.6 because Feast requires Python version 3.6 and above -# - Golang if we need to build Feast CLI from source -# - Helm if we want to install Feast release - -apt-get -qq update -apt-get -y install curl wget gettext &> /dev/null - -curl -s https://repo.continuum.io/miniconda/Miniconda3-4.5.12-Linux-x86_64.sh -o /tmp/miniconda.sh -bash /tmp/miniconda.sh -b -p /miniconda &> /dev/null -export PATH=/miniconda/bin:$PATH - -wget -qO- https://dl.google.com/go/go1.12.5.linux-amd64.tar.gz | tar xzf - -mv go /usr/local/ -export PATH=/usr/local/go/bin:$PATH -export GO111MODULE=on - -wget -qO- https://storage.googleapis.com/kubernetes-helm/helm-v2.13.1-linux-amd64.tar.gz | tar xz -mv linux-amd64/helm /usr/local/bin/helm diff --git a/.prow/scripts/prepare_integration_test.sh b/.prow/scripts/prepare_integration_test.sh deleted file mode 100755 index 1a08f26475b..00000000000 --- a/.prow/scripts/prepare_integration_test.sh +++ 
/dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env bash -set -e - -usage() -{ - echo "usage: prepare_integration_test.sh [--skip-build true]" -} - -while [ "$1" != "" ]; do - case "$1" in - --skip-build ) SKIP_BUILD=true; shift;; - * ) usage; exit 1 - esac - shift -done - -# Authenticate to Google Cloud and GKE -# ============================================================ -GOOGLE_PROJECT_ID=kf-feast -KUBE_CLUSTER_NAME=primary-test-cluster -KUBE_CLUSTER_ZONE=us-central1-a -KEY_FILE=/etc/service-account/service-account.json - -gcloud -q auth activate-service-account --key-file=${KEY_FILE} -gcloud -q auth configure-docker -gcloud -q config set project ${GOOGLE_PROJECT_ID} -gcloud -q container clusters get-credentials ${KUBE_CLUSTER_NAME} --zone ${KUBE_CLUSTER_ZONE} --project ${GOOGLE_PROJECT_ID} -export GOOGLE_APPLICATION_CREDENTIALS=${KEY_FILE} - -# Install Python 3.6, Golang 1.12, Helm and Feast SDK -# ============================================================ -. .prow/scripts/install_test_tools.sh -. .prow/scripts/install_feast_sdk.sh -.prow/scripts/prepare_maven_cache.sh --archive-uri gs://feast-templocation-kf-feast/.m2.tar --output-dir ${FEAST_HOME} - -# Prepare Feast test data and config -# ============================================================ - -bq -q mk --dataset ${FEAST_WAREHOUSE_DATASET} -gsutil -q cp ${FEAST_HOME}/integration-tests/testdata/feature_values/ingestion_1.csv ${BATCH_IMPORT_DATA_GCS_PATH} - -BUILD_ID=${BUILD_ID:0:5} -envsubst < integration-tests/feast-helm-values.yaml.template > integration-tests/feast-helm-values.yaml -cd ${FEAST_HOME}/integration-tests/testdata/import_specs -envsubst < batch_from_gcs.yaml.template > batch_from_gcs.yaml -envsubst < stream_from_kafka.yaml.template > stream_from_kafka.yaml - -if [[ ! ${SKIP_BUILD} ]]; then - -echo "============================================================" -echo "Building Feast for Testing" -echo "============================================================" -cd ${FEAST_HOME} -docker build -t us.gcr.io/kf-feast/feast-core:${FEAST_IMAGE_TAG} -f Dockerfiles/core/Dockerfile . & -docker build -t us.gcr.io/kf-feast/feast-serving:${FEAST_IMAGE_TAG} -f Dockerfiles/serving/Dockerfile . & -wait -docker push us.gcr.io/kf-feast/feast-core:${FEAST_IMAGE_TAG} & -docker push us.gcr.io/kf-feast/feast-serving:${FEAST_IMAGE_TAG} & -wait - -fi - -# Switch back context to original directory -set +ex -cd ${FEAST_HOME} \ No newline at end of file diff --git a/.prow/scripts/run_unit_test.sh b/.prow/scripts/run_unit_test.sh deleted file mode 100755 index 1388749586a..00000000000 --- a/.prow/scripts/run_unit_test.sh +++ /dev/null @@ -1,59 +0,0 @@ -#!/usr/bin/env bash - -# This script will run unit test for a specific Feast component: -# - core, ingestion, serving or cli -# -# This script includes the pre and post test scripts, such as -# - downloading maven cache repository -# - saving the test output report so it can be viewed with Spyglass in Prow - -usage() -{ - echo "usage: run_unit_test.sh - --component {core, ingestion, serving, cli}" -} - -while [ "$1" != "" ]; do - case "$1" in - --component ) COMPONENT="$2"; shift;; - * ) usage; exit 1 - esac - shift -done - -if [[ ! ${COMPONENT} ]]; then - usage; exit 1; -fi - -. 
.prow/scripts/install_google_cloud_sdk.sh - -if [[ ${COMPONENT} == "core" ]] || [[ ${COMPONENT} == "ingestion" ]] || [[ ${COMPONENT} == "serving" ]]; then - - .prow/scripts/prepare_maven_cache.sh --archive-uri gs://feast-templocation-kf-feast/.m2.tar --output-dir /root/ - mvn --projects ${COMPONENT} test - TEST_EXIT_CODE=$? - cp -r ${COMPONENT}/target/surefire-reports /logs/artifacts/surefire-reports - -elif [[ ${COMPONENT} == "cli" ]]; then - - # https://stackoverflow.com/questions/6871859/piping-command-output-to-tee-but-also-save-exit-code-of-command - set -o pipefail - - go get -u github.com/jstemmer/go-junit-report - go test -v ./cli/feast/... 2>&1 | tee test_output - TEST_EXIT_CODE=$? - cat test_output | ${GOPATH}/bin/go-junit-report > ${ARTIFACTS}/unittest-cli-report.xml - -elif [[ ${COMPONENT} == "python-sdk" ]]; then - - cd sdk/python - pip install -r requirements-test.txt - pip install . - pytest ./tests --junitxml=${ARTIFACTS}/unittest-pythonsdk-report.xml - TEST_EXIT_CODE=$? - -else - usage; exit 1 -fi - -exit ${TEST_EXIT_CODE} diff --git a/.prow/scripts/sync-helm-charts.sh b/.prow/scripts/sync-helm-charts.sh new file mode 100755 index 00000000000..918979dd3ff --- /dev/null +++ b/.prow/scripts/sync-helm-charts.sh @@ -0,0 +1,57 @@ +#!/usr/bin/env bash + +# Script to sync local charts to remote helm repository in Google Cloud Storage +# Copied from: https://github.com/helm/charts/blob/master/test/repo-sync.sh + +set -o errexit +set -o nounset +set -o pipefail + +log_error() { + printf '\e[31mERROR: %s\n\e[39m' "$1" >&2 +} + +# Assume working directory is Feast repository root folder +repo_dir=infra/charts +bucket=gs://feast-charts +repo_url=https://feast-charts.storage.googleapis.com/ +sync_dir=/tmp/syncdir +index_dir=/tmp/indexdir + +rm -rf $sync_dir $index_dir + +echo "Syncing repo '$repo_dir'..." + +mkdir -p "$sync_dir" +if ! gsutil cp "$bucket/index.yaml" "$index_dir/index.yaml"; then + log_error "Exiting because unable to copy index locally. Not safe to proceed." + exit 1 +fi + +exit_code=0 + +for dir in "$repo_dir"/*; do + if helm dependency build "$dir"; then + helm package --destination "$sync_dir" "$dir" + else + log_error "Problem building dependencies. Skipping packaging of '$dir'." + exit_code=1 + fi +done + +if helm repo index --url "$repo_url" --merge "$index_dir/index.yaml" "$sync_dir"; then + # Move updated index.yaml to sync folder so we don't push the old one again + mv -f "$sync_dir/index.yaml" "$index_dir/index.yaml" + + gsutil -m rsync "$sync_dir" "$bucket" + + # Make sure index.yaml is synced last + gsutil cp "$index_dir/index.yaml" "$bucket" +else + log_error "Exiting because unable to update index. Not safe to push update." + exit 1 +fi + +ls -l "$sync_dir" + +exit "$exit_code" \ No newline at end of file diff --git a/.prow/scripts/test-core-ingestion.sh b/.prow/scripts/test-core-ingestion.sh new file mode 100755 index 00000000000..98a47ca68c9 --- /dev/null +++ b/.prow/scripts/test-core-ingestion.sh @@ -0,0 +1,20 @@ +#!/usr/bin/env bash + +.prow/scripts/download-maven-cache.sh \ + --archive-uri gs://feast-templocation-kf-feast/.m2.2019-10-24.tar \ + --output-dir /root/ + +# Core depends on Ingestion so they are tested together +# Skip Maven enforcer: https://stackoverflow.com/questions/50647223/maven-enforcer-issue-when-running-from-reactor-level +mvn --projects core,ingestion --batch-mode --define skipTests=true \ + --define enforcer.skip=true clean install +mvn --projects core,ingestion --define enforcer.skip=true test +TEST_EXIT_CODE=$? 
+
+# Default artifact location setting in Prow jobs
+LOGS_ARTIFACT_PATH=/logs/artifacts
+mkdir -p ${LOGS_ARTIFACT_PATH}/surefire-reports
+cp core/target/surefire-reports/* ${LOGS_ARTIFACT_PATH}/surefire-reports/
+cp ingestion/target/surefire-reports/* ${LOGS_ARTIFACT_PATH}/surefire-reports/
+
+exit ${TEST_EXIT_CODE}
\ No newline at end of file
diff --git a/.prow/scripts/test-end-to-end.sh b/.prow/scripts/test-end-to-end.sh
new file mode 100755
index 00000000000..fde251097f5
--- /dev/null
+++ b/.prow/scripts/test-end-to-end.sh
@@ -0,0 +1,213 @@
+#!/usr/bin/env bash
+
+set -e
+set -o pipefail
+
+if ! cat /etc/*release | grep -q stretch; then
+  echo ${BASH_SOURCE} only supports Debian stretch.
+  echo Please change your operating system to use this script.
+  exit 1
+fi
+
+echo "
+This script will run end-to-end tests for Feast Core and Online Serving.
+
+1. Install Redis as the store for Feast Online Serving.
+2. Install Postgres for persisting Feast metadata.
+3. Install Kafka and Zookeeper as the Source in Feast.
+4. Install Python 3.7.4, Feast Python SDK and run end-to-end tests from
+   tests/e2e via pytest.
+"
+
+echo "
+============================================================
+Installing Redis at localhost:6379
+============================================================
+"
+apt-get -qq update
+# Allow starting serving in this Maven Docker image. Default set to not allowed.
+echo "exit 0" > /usr/sbin/policy-rc.d
+apt-get -y install redis-server wget > /var/log/redis.install.log
+redis-server --daemonize yes
+redis-cli ping
+
+echo "
+============================================================
+Installing Postgres at localhost:5432
+============================================================
+"
+apt-get -y install postgresql > /var/log/postgresql.install.log
+service postgresql start
+# Initialize with database: 'postgres', user: 'postgres', password: 'password'
+cat <<EOF > /tmp/update-postgres-role.sh
+psql -c "ALTER USER postgres PASSWORD 'password';"
+EOF
+chmod +x /tmp/update-postgres-role.sh
+su -s /bin/bash -c /tmp/update-postgres-role.sh postgres
+export PGPASSWORD=password
+pg_isready
+
+echo "
+============================================================
+Installing Zookeeper at localhost:2181
+Installing Kafka at localhost:9092
+============================================================
+"
+wget -qO- https://www-eu.apache.org/dist/kafka/2.3.0/kafka_2.12-2.3.0.tgz | tar xz
+mv kafka_2.12-2.3.0/ /tmp/kafka
+nohup /tmp/kafka/bin/zookeeper-server-start.sh /tmp/kafka/config/zookeeper.properties &> /var/log/zookeeper.log 2>&1 &
+sleep 5
+tail -n10 /var/log/zookeeper.log
+nohup /tmp/kafka/bin/kafka-server-start.sh /tmp/kafka/config/server.properties &> /var/log/kafka.log 2>&1 &
+sleep 5
+tail -n10 /var/log/kafka.log
+
+echo "
+============================================================
+Building jars for Feast
+============================================================
+"
+
+.prow/scripts/download-maven-cache.sh \
+  --archive-uri gs://feast-templocation-kf-feast/.m2.2019-10-24.tar \
+  --output-dir /root/
+
+# Build jars for Feast
+mvn --quiet --batch-mode --define skipTests=true clean package
+
+echo "
+============================================================
+Starting Feast Core
+============================================================
+"
+# Start Feast Core in background
+cat <<EOF > /tmp/core.application.yml
+grpc:
+  port: 6565
+  enable-reflection: true
+
+feast:
+  version: 0.3
+  jobs:
+    runner: DirectRunner
+    options: {}
+    metrics:
+      enabled: false
+
+  stream:
+    type: kafka
+    options:
+      topic: feast-features
+      bootstrapServers: localhost:9092
+      replicationFactor: 1
+      partitions: 1
+
+spring:
+  jpa:
+    properties.hibernate.format_sql: true
+    hibernate.naming.physical-strategy=org.hibernate.boot.model.naming: PhysicalNamingStrategyStandardImpl
+    hibernate.ddl-auto: update
+  datasource:
+    url: jdbc:postgresql://localhost:5432/postgres
+    username: postgres
+    password: password
+
+management:
+  metrics:
+    export:
+      simple:
+        enabled: false
+      statsd:
+        enabled: false
+EOF
+
+nohup java -jar core/target/feast-core-0.3.0-SNAPSHOT.jar \
+  --spring.config.location=file:///tmp/core.application.yml \
+  &> /var/log/feast-core.log &
+sleep 20
+tail -n10 /var/log/feast-core.log
+
+echo "
+============================================================
+Starting Feast Online Serving
+============================================================
+"
+# Start Feast Online Serving in background
+cat <<EOF > /tmp/serving.store.redis.yml
+name: serving
+type: REDIS
+redis_config:
+  host: localhost
+  port: 6379
+subscriptions:
+  - name: "*"
+    version: ">0"
+EOF
+
+cat <<EOF > /tmp/serving.online.application.yml
+feast:
+  version: 0.3
+  core-host: localhost
+  core-grpc-port: 6565
+
+  tracing:
+    enabled: false
+
+  store:
+    config-path: /tmp/serving.store.redis.yml
+    redis-pool-max-size: 128
+    redis-pool-max-idle: 16
+
+  jobs:
+    staging-location: gs://feast-templocation-kf-feast/staging-location
+    store-type:
+    store-options: {}
+
+grpc:
+  port: 6566
+  enable-reflection: true
+
+spring:
+  main:
+    web-environment: false
+EOF
+
+nohup java -jar serving/target/feast-serving-0.3.0-SNAPSHOT.jar \
+  --spring.config.location=file:///tmp/serving.online.application.yml \
+  &> /var/log/feast-serving-online.log &
+sleep 15
+tail -n10 /var/log/feast-serving-online.log
+
+echo "
+============================================================
+Installing Python 3.7 with Miniconda and Feast SDK
+============================================================
+"
+# Install Python 3.7 with Miniconda
+wget -q https://repo.continuum.io/miniconda/Miniconda3-4.7.12-Linux-x86_64.sh \
+  -O /tmp/miniconda.sh
+bash /tmp/miniconda.sh -b -p /root/miniconda -f
+/root/miniconda/bin/conda init
+source ~/.bashrc
+
+# Install Feast Python SDK and test requirements
+pip install -q sdk/python
+pip install -qr tests/e2e/requirements.txt
+
+echo "
+============================================================
+Running end-to-end tests with pytest at 'tests/e2e'
+============================================================
+"
+# Default artifact location setting in Prow jobs
+LOGS_ARTIFACT_PATH=/logs/artifacts
+
+ORIGINAL_DIR=$(pwd)
+cd tests/e2e
+
+set +e
+pytest --junitxml=${LOGS_ARTIFACT_PATH}/python-sdk-test-report.xml
+TEST_EXIT_CODE=$?
+
+cd ${ORIGINAL_DIR}
+exit ${TEST_EXIT_CODE}
diff --git a/.prow/scripts/test-golang-sdk.sh b/.prow/scripts/test-golang-sdk.sh
new file mode 100755
index 00000000000..b586927a512
--- /dev/null
+++ b/.prow/scripts/test-golang-sdk.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env bash
+
+set -o pipefail
+
+cd sdk/go
+go test -v 2>&1 | tee /tmp/test_output
+TEST_EXIT_CODE=$?
+
+# Default artifact location setting in Prow jobs
+LOGS_ARTIFACT_PATH=/logs/artifacts
+
+go get -u github.com/jstemmer/go-junit-report
+cat /tmp/test_output | ${GOPATH}/bin/go-junit-report > ${LOGS_ARTIFACT_PATH}/golang-sdk-test-report.xml
+
+exit ${TEST_EXIT_CODE}
\ No newline at end of file
diff --git a/.prow/scripts/test-java-sdk.sh b/.prow/scripts/test-java-sdk.sh
new file mode 100755
index 00000000000..0731b77976f
--- /dev/null
+++ b/.prow/scripts/test-java-sdk.sh
@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+
+# Skip Maven enforcer: https://stackoverflow.com/questions/50647223/maven-enforcer-issue-when-running-from-reactor-level
+mvn --projects sdk/java --batch-mode --define skipTests=true \
+  --define enforcer.skip=true clean install
+mvn --projects sdk/java --define enforcer.skip=true test
+TEST_EXIT_CODE=$?
+
+# Default artifact location setting in Prow jobs
+LOGS_ARTIFACT_PATH=/logs/artifacts
+cp -r sdk/java/target/surefire-reports ${LOGS_ARTIFACT_PATH}/surefire-reports
+
+exit ${TEST_EXIT_CODE}
\ No newline at end of file
diff --git a/.prow/scripts/test-python-sdk.sh b/.prow/scripts/test-python-sdk.sh
new file mode 100755
index 00000000000..eb40f921c7b
--- /dev/null
+++ b/.prow/scripts/test-python-sdk.sh
@@ -0,0 +1,11 @@
+#!/usr/bin/env bash
+
+set -e
+
+# Default artifact location setting in Prow jobs
+LOGS_ARTIFACT_PATH=/logs/artifacts
+
+cd sdk/python
+pip install -r requirements-test.txt
+pip install .
+pytest --junitxml=${LOGS_ARTIFACT_PATH}/python-sdk-test-report.xml
diff --git a/.prow/scripts/test-serving.sh b/.prow/scripts/test-serving.sh
new file mode 100755
index 00000000000..b56001619b3
--- /dev/null
+++ b/.prow/scripts/test-serving.sh
@@ -0,0 +1,14 @@
+#!/usr/bin/env bash
+
+.prow/scripts/download-maven-cache.sh \
+  --archive-uri gs://feast-templocation-kf-feast/.m2.2019-10-24.tar \
+  --output-dir /root/
+
+mvn --batch-mode --also-make --projects serving test
+TEST_EXIT_CODE=$?
+
+# Default artifact location setting in Prow jobs
+LOGS_ARTIFACT_PATH=/logs/artifacts
+cp -r serving/target/surefire-reports ${LOGS_ARTIFACT_PATH}/surefire-reports
+
+exit ${TEST_EXIT_CODE}
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index a80d21b6f92..fcee6f3a39b 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,5 +1,306 @@
 # Contributing Guide
 
+## Getting Started
+
+The following guide will help you quickly run Feast on your local machine.
+
+The main components of Feast are:
+- **Feast Core** handles FeatureSpec registration, starts and monitors Ingestion
+  jobs and ensures that Feast internal metadata is consistent.
+- **Feast Ingestion** subscribes to streams of FeatureRow and writes the feature
+  values to registered Stores.
+- **Feast Serving** handles requests for feature value retrieval from end users.
+
+![Feast Components Overview](docs/assets/feast-components-overview.png)
+
+**Pre-requisites**
+- Java SDK version 8
+- Python version 3.6 (or above) and pip
+- Access to a Postgres database (version 11 and above)
+- Access to a [Redis](https://redis.io/topics/quickstart) instance (tested on version 5.x)
+- Access to [Kafka](https://kafka.apache.org/) brokers (tested on version 2.x)
+- [Maven](https://maven.apache.org/install.html) version 3.6.x
+- [grpc_cli](https://github.com/grpc/grpc/blob/master/doc/command_line_tool.md)
+  is useful for debugging and quick testing
+- An overview of Feast specifications and [protos](./protos/feast)
+
+> **Assumptions:**
+>
+> 1. Postgres is running on "localhost:5432" and has a database called "postgres" which
+>    can be accessed with credentials user "postgres" and password "password".
+>    To use a different database name and credentials, please update
+>    "$FEAST_HOME/core/src/main/resources/application.yml"
+>    or set these environment variables: DB_HOST, DB_USERNAME, DB_PASSWORD.
+> 2. Redis is running locally and accessible from "localhost:6379"
+> 3. Feast has admin access to BigQuery.
+
+
+```
+# Clone Feast branch 0.3-dev
+# $FEAST_HOME will refer to the root directory of this Feast Git repository
+
+git clone -b 0.3-dev https://github.com/gojek/feast
+cd feast
+```
+
+#### Starting Feast Core
+
+```
+# Please check the default configuration for Feast Core in
+# "$FEAST_HOME/core/src/main/resources/application.yml" and update it accordingly.
+#
+# Start Feast Core GRPC server on localhost:6565
+mvn --projects core spring-boot:run
+
+# If Feast Core starts successfully, verify that the correct Stores are registered,
+# for example by using grpc_cli.
+grpc_cli call localhost:6565 GetStores ''
+
+# Should return something similar to the following.
+# Note that you should change BigQuery projectId and datasetId accordingly
+# in "$FEAST_HOME/core/src/main/resources/application.yml"
+
+store {
+  name: "SERVING"
+  type: REDIS
+  subscriptions {
+    name: "*"
+    version: ">0"
+  }
+  redis_config {
+    host: "localhost"
+    port: 6379
+  }
+}
+store {
+  name: "WAREHOUSE"
+  type: BIGQUERY
+  subscriptions {
+    name: "*"
+    version: ">0"
+  }
+  bigquery_config {
+    project_id: "my-google-project-id"
+    dataset_id: "my-bigquery-dataset-id"
+  }
+}
+```
+
+#### Starting Feast Serving
+
+Feast Serving requires administrators to provide an **existing** store name in Feast.
+An instance of Feast Serving can only retrieve features from a **single** store.
+> In order to retrieve features from multiple stores you must start **multiple**
+> instances of Feast Serving. If you start multiple Feast Serving instances on a single host,
+> make sure that they are listening on different ports.
+
+```
+# Start Feast Serving GRPC server on localhost:6566 with store name "SERVING"
+mvn --projects serving spring-boot:run -Dspring-boot.run.arguments='--feast.store-name=SERVING'
+
+# To verify Feast Serving starts successfully
+grpc_cli call localhost:6566 GetFeastServingType ''
+
+# Should return something similar to the following.
+type: FEAST_SERVING_TYPE_ONLINE
+```
+
+
+#### Registering a FeatureSet
+
+Create a new FeatureSet on Feast by sending a request to Feast Core. When a
+feature set is successfully registered, Feast Core will start an **ingestion** job
+that listens for new features in the FeatureSet. Note that Feast currently only
+supports sources of type "KAFKA", so you must have access to a running Kafka broker
+to register a FeatureSet successfully.
+
+```
+# Example of registering a new driver feature set
+# Note the source value; it assumes that you have access to a Kafka broker
+# running on localhost:9092
+
+grpc_cli call localhost:6565 ApplyFeatureSet '
+feature_set {
+  name: "driver"
+  version: 1
+
+  entities {
+    name: "driver_id"
+    value_type: INT64
+  }
+
+  features {
+    name: "city"
+    value_type: STRING
+  }
+
+  source {
+    type: KAFKA
+    kafka_source_config {
+      bootstrap_servers: "localhost:9092"
+    }
+  }
+}
+'
+
+# To check that the FeatureSet has been registered correctly.
+# You should also see logs from Feast Core of the ingestion job being started +grpc_cli call localhost:6565 GetFeatureSets '' +``` + + +#### Ingestion and Population of Feature Values + +``` +# Produce FeatureRow messages to Kafka so it will be ingested by Feast +# and written to the registered stores. +# Make sure the value here is the topic assigned to the feature set +# ... producer.send("feast-driver-features" ...) +# +# Install Python SDK to help writing FeatureRow messages to Kafka +cd $FEAST_HOME/sdk/python +pip3 install -e . +pip3 install pendulum + +# Produce FeatureRow messages to Kafka so it will be ingested by Feast +# and written to the corresponding store. +# Make sure the value here is the topic assigned to the feature set +# ... producer.send("feast-test_feature_set-features" ...) +python3 - < Tool Windows > Maven` +1. Drill down to e.g. `Feast Core > Plugins > spring-boot:run`, right-click and `Create 'feast-core [spring-boot'…` +1. In the dialog that pops up, check the `Resolve Workspace artifacts` box +1. Click `OK`. You should now be able to select this run configuration for the Play button in the main toolbar, keyboard shortcuts, etc. + +[idea-boot-main]: https://stackoverflow.com/questions/30237768/run-spring-boots-main-using-ide + +#### Tips for Running Postgres, Redis and Kafka with Docker + +This guide assumes you are running Docker service on a bridge network (which +is usually the case if you're running Linux). Otherwise, you may need to +use different network options than shown below. + +> `--net host` usually only works as expected when you're running Docker +> service in bridge networking mode. + +``` +# Start Postgres +docker run --name postgres --rm -it -d --net host -e POSTGRES_DB=postgres -e POSTGRES_USER=postgres \ +-e POSTGRES_PASSWORD=password postgres:12-alpine + +# Start Redis +docker run --name redis --rm -it --net host -d redis:5-alpine + +# Start Zookeeper (needed by Kafka) +docker run --rm \ + --net=host \ + --name=zookeeper \ + --env=ZOOKEEPER_CLIENT_PORT=2181 \ + --detach confluentinc/cp-zookeeper:5.2.1 + +# Start Kafka +docker run --rm \ + --net=host \ + --name=kafka \ + --env=KAFKA_ZOOKEEPER_CONNECT=localhost:2181 \ + --env=KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://localhost:9092 \ + --env=KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 \ + --detach confluentinc/cp-kafka:5.2.1 +``` + ## Code reviews Code submission to Feast (including submission from project maintainers) requires review and approval. @@ -11,13 +312,9 @@ Please submit a **pull request** to initiate the code review process. We use [pr We conform to the [java google style guide](https://google.github.io/styleguide/javaguide.html) -If using intellij please import the code styles: +If using Intellij please import the code styles: https://github.com/google/styleguide/blob/gh-pages/intellij-java-google-style.xml ### Go Make sure you apply `go fmt`. - -### JavaScript - -TODO \ No newline at end of file diff --git a/Dockerfiles/README.md b/Dockerfiles/README.md deleted file mode 100644 index 30404ce4c54..00000000000 --- a/Dockerfiles/README.md +++ /dev/null @@ -1 +0,0 @@ -TODO \ No newline at end of file diff --git a/Dockerfiles/core/Dockerfile b/Dockerfiles/core/Dockerfile deleted file mode 100644 index d03657c2823..00000000000 --- a/Dockerfiles/core/Dockerfile +++ /dev/null @@ -1,16 +0,0 @@ -FROM maven:3.6-jdk-8-slim as builder -ARG REVISION=dev -COPY . 
/build -WORKDIR /build -ENV MAVEN_OPTS="-Dmaven.repo.local=/build/.m2/repository -DdependencyLocationsEnabled=false" -RUN mvn --projects core,ingestion -Drevision=$REVISION -DskipTests=true --batch-mode package - -FROM openjdk:8-jre as production -ARG REVISION=dev -COPY --from=builder /build/core/target/feast-core-$REVISION.jar /usr/share/feast/feast-core.jar -COPY --from=builder /build/ingestion/target/feast-ingestion-$REVISION.jar /usr/share/feast/feast-ingestion.jar -ENV JOB_EXECUTABLE=/usr/share/feast/feast-ingestion.jar -ENTRYPOINT ["java", \ -"-XX:+UnlockExperimentalVMOptions", \ -"-XX:+UseCGroupMemoryLimitForHeap", \ -"-jar", "/usr/share/feast/feast-core.jar"] diff --git a/Dockerfiles/serving/Dockerfile b/Dockerfiles/serving/Dockerfile deleted file mode 100644 index f243a649091..00000000000 --- a/Dockerfiles/serving/Dockerfile +++ /dev/null @@ -1,16 +0,0 @@ -FROM maven:3.6-jdk-8-slim as builder -ARG REVISION=dev -COPY . /build -WORKDIR /build -ENV MAVEN_OPTS="-Dmaven.repo.local=/build/.m2/repository -DdependencyLocationsEnabled=false" -RUN mvn --projects serving -Drevision=$REVISION -DskipTests=true --batch-mode package - -FROM openjdk:8-jre-alpine as production -ARG REVISION=dev -COPY --from=builder /build/serving/target/feast-serving-$REVISION.jar /usr/share/feast/feast-serving.jar -ENTRYPOINT ["java", \ - "-XX:+UseG1GC", \ - "-XX:+UseStringDeduplication", \ - "-XX:+UnlockExperimentalVMOptions", \ - "-XX:+UseCGroupMemoryLimitForHeap", \ - "-jar", "/usr/share/feast/feast-serving.jar"] diff --git a/Makefile b/Makefile index 84b787c2c01..e527ddd8b70 100644 --- a/Makefile +++ b/Makefile @@ -14,9 +14,6 @@ # limitations under the License. # -VERSION_FILE = VERSION -FEAST_VERSION = `cat $(VERSION_FILE)` - test: mvn test @@ -25,19 +22,21 @@ test-integration: build-proto: $(MAKE) -C protos gen-go + $(MAKE) -C protos gen-python + $(MAKE) -C protos gen-docs build-cli: $(MAKE) build-proto $(MAKE) -C cli build-all build-java: - mvn clean verify -Drevision=$(FEAST_VERSION) + mvn clean verify -Drevision=$(VERSION) build-docker: - docker build -t $(registry)/feast-core:$(version) -f docker/core/Dockerfile . - docker build -t $(registry)/feast-serving:$(version) -f docker/serving/Dockerfile . + docker build -t $(REGISTRY)/feast-core:$(VERSION) -f infra/docker/core/Dockerfile . + docker build -t $(REGISTRY)/feast-serving:$(VERSION) -f infra/docker/serving/Dockerfile . 
build-push-docker: - @$(MAKE) build-docker registry=$(registry) version=$(version) - docker push $(registry)/feast-core:$(version) - docker push $(registry)/feast-serving:$(version) \ No newline at end of file + @$(MAKE) build-docker registry=$(REGISTRY) version=$(VERSION) + docker push $(REGISTRY)/feast-core:$(VERSION) + docker push $(REGISTRY)/feast-serving:$(VERSION) \ No newline at end of file diff --git a/OWNERS b/OWNERS index a994c5e4691..52918fbf194 100644 --- a/OWNERS +++ b/OWNERS @@ -2,17 +2,10 @@ approvers: - zhilingc - pradithya - woop - - tims - thirteen37 - davidheryanto - - budi - - romanwozniak reviewers: - zhilingc - - pradithya - woop - - tims - thirteen37 - davidheryanto - - budi - - romanwozniak diff --git a/README.md b/README.md index ad88a861dee..188c1d8899f 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Feast - Feature Store for Machine Learning [![Build Status](http://prow.feast.ai/badge.svg?jobs=integration-test)](http://prow.feast.ai) +# Feast - Feature Store for Machine Learning ## Overview @@ -13,7 +13,7 @@ It aims to: ## High Level Architecture -![Feast Architecture](docs/architecture.png) +![Feast Architecture](docs/assets/arch.png) The Feast platform is broken down into the following functional areas: @@ -35,21 +35,6 @@ __Feature standardisation__: Feast presents a centralized platform on which team __Discovery__: Feast allows users to easily explore and discover features and their associated information. This allows for a deeper understanding of features and theirs specifications, more feature reuse between teams and projects, and faster experimentation. Each new ML project can leverage features that have been created by prior teams, which compounds an organization's ability to discover new insights. -## More Information - -* [Components](docs/components.md) -* [Concepts](docs/concepts.md) - -For Feast administrators: -* [Installation quickstart](docs/install.md) -* [Helm charts](charts/README.md) details - -For Feast end users: -* [Creating features](docs/endusers.md) - -For Feast developers: -* [Building the CLI](cli/README.md) - ## Notice Feast is still under active development. Your feedback and contributions are important to us. Please check our [contributing guide](CONTRIBUTING.md) for details. @@ -63,7 +48,7 @@ doesn't comply with the license.) Apache header: - Copyright 2018 The Feast Authors + Copyright 2019 The Feast Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/VERSION b/VERSION deleted file mode 100644 index 6c6aa7cb091..00000000000 --- a/VERSION +++ /dev/null @@ -1 +0,0 @@ -0.1.0 \ No newline at end of file diff --git a/charts/README.md b/charts/README.md deleted file mode 100644 index 88edc6f4ae0..00000000000 --- a/charts/README.md +++ /dev/null @@ -1,119 +0,0 @@ -# Helm charts -This chart adds all the components required to run feast, sans the stores to which you might want to ingest your features. Those will have to be deployed seperately. - -## Installing the chart - -``` -helm dep update -helm install --name feast . -``` - -## Requirements - -### External Permissions - -The feast deployment requires access to the following components: - -- `gcs`: read/write - -As well as the following, depending on your use case: - -- `bigquery`: read/write -- `bigtable`: read/write/tableAdmin - -The recommended way to give your deployment these permissions is to deploy Feast on a cluster with the required permissions. 
- -## Components - -The components included in this chart are: - -- `feast-core`: the main API controller of feast -- `feast-metadata`: postgres db that persists feast's metadata - - The stable postgres chart is being used for this deployment - any of the parameters described [here](https://github.com/helm/charts/tree/master/stable/postgresql) can be overridden to customise your postgres deployment. -- `feast-serving`: service that serves up features from the various serving stores - -Components that Feast supports, but this installation will not include are: - -- Serving Storage - - `Redis` or `Bigtable` dbs for feature serving storage -- [TICK stack](https://www.influxdata.com/time-series-platform/) for metrics monitoring - - Set `statsd.host` and `statsd.port` to direct job metrics to your metrics store. - - Note that if you do not provision a metrics store, feast will only retain the latest metrics from your jobs. -- [Jaeger tracing](www.jaegertracing.io) for serving performance. - - Set `serving.jaeger.enabled` to `true`, and configure the following parameters: - - `serving.jaeger.host` - - `serving.jaeger.port` - - `serving.jaeger.options.samplerType` - - `serving.jaeger.options.samplerParam` - -## Uninstalling Feast - -To uninstall the `feast` deployment: -``` -helm del feast -kubectl delete persistentvolumeclaim feast-postgresql -``` - -## Configuration -The following table lists the configurable parameters of the Feast chart and their default values. - -| var | desc | default | -| --- | --- | --- | -| `core.image.registry` | core docker image registry | feast | -| `core.image.repository` | core docker image repository | feast-core | -| `core.image.tag` | core docker image version | 0.1.0 | -| `core.jobs.monitoring.initialDelay` | delay before a job starts to be monitored in ms | 60000 | -| `core.jobs.monitoring.period` | polling interval for jobs monitoring in ms | 5000 | -| `core.jobs.options` | additional options to be provided to the beam job. Should be a char escaped json k-v object | {} | -| `core.jobs.runner` | beam job runner - one of `DirectRunner`, `FlinkRunner` or `DataflowRunner` | DirectRunner | -| `core.jobs.workspace` | workspace path for ingestion jobs, used for separate job workspaces to share importJobSpecs.yaml with ingestion and for writing errors to if no default errors store is configured | nil | -| `core.replicaCount` | core deployment replica count | 3 | -| `core.resources.limits.cpu` | core cpu limits | 1 | -| `core.resources.limits.memory` | core memory limits | 2G | -| `core.resources.requests.cpu` | core cpu requested | 1 | -| `core.resources.requests.memory` | core memory requested | 2G | -| `core.service.extIPAdr` | Internal load balancer IP Address for core, required so jobs on external runners can connect to core | nil | -| `core.service.loadBalancerSourceRanges` | IP source ranges that will have access to core. 
If not set, will default to 0.0.0.0/0 | nil | -| `core.service.grpc.port` | core service exposed grpc port | 8433 | -| `core.service.grpc.targetPort` | core service target grpc port | 8433 | -| `core.service.http.port` | core service exposed http port | 80 | -| `core.service.http.targetPort` | core service target http port | 8080 | -| `core.projectId` | GCP project ID core service resides at | gcp-project-id | -| `core.trainingDatasetPrefix` | prefix for training datasets created in bq | fs | -| `dataflow.location` | desired dataflow's region | nil | -| `dataflow.projectID` | desired dataflow's project id | nil | -| `serving.config.maxEntityPerBatch` | max entities that can be requested at a time | 2000 | -| `serving.config.maxNumberOfThread` | max number of threads per instance of serving | 256 | -| `serving.config.redisPool.maxIdle` | max idle connections to redis | 16 | -| `serving.config.redisPool.maxSize` | max number of connections to redis | 256 | -| `serving.config.timeout` | request timeout in seconds | 5 | -| `serving.image.registry` | serving docker image registry | feast | -| `serving.image.repository` | serving docker image repository | feast-serving | -| `serving.image.tag` | serving docker image version | 0.1.0 | -| `serving.replicaCount` | serving replica count | 4 | -| `serving.resources.limits.cpu` | serving cpu limits | 1 | -| `serving.resources.limits.memory` | serving memory limits | 2G | -| `serving.resources.requests.cpu` | serving cpu requested | 1 | -| `serving.resources.requests.memory` | serving memory requested | 2G | -| `serving.service.grpc.port` | serving service exposed grpc port | 8433 | -| `serving.service.grpc.targetPort` | serving service target grpc port | 8433 | -| `serving.service.http.port` | serving service exposed http port | 80 | -| `serving.service.http.targetPort` | serving service target http port | 8080 | -| `serving.service.extIPAdr` | Internal load balancer IP Address for serving, required so jobs on external runners can connect to the service | nil | -| `serving.service.loadBalancerSourceRanges` | IP source ranges that will have access to serving. If not set, will default to 0.0.0.0/0 | nil | -| `serviceAccount.name` | service account secret name to mount to deployments | nil | -| `serviceAccount.key` | service account secret key to mount to deployments | nil | -| `statsd.host` | host of statsd daemon for job metrics to be sent to | nil | -| `statsd.port` | port of statsd daemon for job metrics to be sent to | nil | -| `store.errors.type` | type of default errors store to write errors to. One of `stdout`, `stderr`, `file.json` | nil | -| `store.errors.options` | additional options for the default error store in json string format | `{}` | -| `store.serving.type` | type of default serving store to write errors to. One of `redis`, `bigtable` | nil | -| `store.serving.options` | additional options for the default serving store in json string format | `{}` | -| `store.warehouse.type` | type of default warehouse store to write errors to. 
One of `bigquery`, `file.json` | nil | -| `store.warehouse.options` | additional options for the default warehouse store in json string format | `{}` | -| `postgresql.provision` | Provision PostgreSQL | true | -| `postgresql.postgresPassword` | specify password if you want the postgres password secret to be generated | nil | -| `postgresql.resources.requests.cpu` | postgres requested cpu | 100m | -| `postgresql.resources.requests.memory` | postgres requested memory | 256Mi | -| `redis.provision` | Provision Redis instance | true | -| `redis.name` | Helm release name for the Redis instance | feast-redis | \ No newline at end of file diff --git a/charts/dist/feast-0.1.2.tgz b/charts/dist/feast-0.1.2.tgz new file mode 100644 index 00000000000..fb14c60ae9b Binary files /dev/null and b/charts/dist/feast-0.1.2.tgz differ diff --git a/charts/dist/index.yaml b/charts/dist/index.yaml new file mode 100644 index 00000000000..0a0f157e63d --- /dev/null +++ b/charts/dist/index.yaml @@ -0,0 +1,13 @@ +apiVersion: v1 +entries: + feast: + - apiVersion: v1 + appVersion: 0.1.2 + created: "2019-07-26T16:00:55.364039+08:00" + description: A Helm chart to install Feast on kubernetes + digest: 41f3194c2f69b1ea22e1f7b65b8448d8064ce9260cef47a228da78ccefc3fafb + name: feast + urls: + - feast-0.1.2.tgz + version: 0.1.2 +generated: "2019-07-26T16:00:55.356884+08:00" diff --git a/charts/feast/charts/postgresql-3.17.0.tgz b/charts/feast/charts/postgresql-3.17.0.tgz deleted file mode 100644 index 3fe96f84b2b..00000000000 Binary files a/charts/feast/charts/postgresql-3.17.0.tgz and /dev/null differ diff --git a/charts/feast/charts/redis-6.4.4.tgz b/charts/feast/charts/redis-6.4.4.tgz deleted file mode 100644 index 462d8559697..00000000000 Binary files a/charts/feast/charts/redis-6.4.4.tgz and /dev/null differ diff --git a/charts/feast/charts/redis-6.4.5.tgz b/charts/feast/charts/redis-6.4.5.tgz new file mode 100644 index 00000000000..a388fd07cea Binary files /dev/null and b/charts/feast/charts/redis-6.4.5.tgz differ diff --git a/charts/feast/requirements.lock b/charts/feast/requirements.lock deleted file mode 100644 index 1867d05b4af..00000000000 --- a/charts/feast/requirements.lock +++ /dev/null @@ -1,9 +0,0 @@ -dependencies: -- name: postgresql - repository: https://kubernetes-charts.storage.googleapis.com - version: 3.17.0 -- name: redis - repository: https://kubernetes-charts.storage.googleapis.com - version: 6.4.4 -digest: sha256:e1fb31a7beda17b642a0e1a5369bb8184a7206573e48f07b1bb4ebe35ba9bead -generated: 2019-04-24T16:00:44.597053887+08:00 diff --git a/charts/feast/requirements.yaml b/charts/feast/requirements.yaml deleted file mode 100644 index d47d69395e9..00000000000 --- a/charts/feast/requirements.yaml +++ /dev/null @@ -1,9 +0,0 @@ -dependencies: -- name: postgresql - version: 3.17.0 - repository: "@stable" - condition: postgres.provision -- name: redis - version: 6.4.4 - repository: "@stable" - condition: redis.provision \ No newline at end of file diff --git a/charts/feast/templates/_helpers.tpl b/charts/feast/templates/_helpers.tpl deleted file mode 100644 index 98041e5c1db..00000000000 --- a/charts/feast/templates/_helpers.tpl +++ /dev/null @@ -1,72 +0,0 @@ -{{/* vim: set filetype=mustache: */}} -{{/* -Expand the name of the chart. -*/}} -{{- define "feast.name" -}} -{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} -{{- end -}} - -{{/* -Create a default fully qualified app name. 
-We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -If release name contains chart name it will be used as a full name. -*/}} -{{- define "feast.fullname" -}} -{{- if .Values.fullnameOverride -}} -{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- $name := default .Chart.Name .Values.nameOverride -}} -{{- if contains $name .Release.Name -}} -{{- .Release.Name | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} -{{- end -}} -{{- end -}} -{{- end -}} - -{{/* -Create a fully qualified core name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -*/}} -{{- define "feast.core.name" -}} -{{- $nameGlobalOverride := printf "%s-core" (include "feast.fullname" .) -}} -{{- if .Values.core.fullnameOverride -}} -{{- printf "%s" .Values.core.fullnameOverride | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- printf "%s" $nameGlobalOverride | trunc 63 | trimSuffix "-" -}} -{{- end -}} -{{- end -}} - -{{/* -Create a fully qualified serving name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -*/}} -{{- define "feast.serving.name" -}} -{{- $nameGlobalOverride := printf "%s-serving" (include "feast.fullname" .) -}} -{{- if .Values.core.fullnameOverride -}} -{{- printf "%s" .Values.serving.fullnameOverride | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- printf "%s" $nameGlobalOverride | trunc 63 | trimSuffix "-" -}} -{{- end -}} -{{- end -}} - -{{/* -Create a default fully qualified app name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -If release name contains chart name it will be used as a full name. -*/}} -{{- define "postgresql.fullname" -}} -{{- $name := default "postgresql" -}} -{{- if contains $name .Release.Name -}} -{{- .Release.Name | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} -{{- end -}} -{{- end -}} - -{{/* -Create chart name and version as used by the chart label. -*/}} -{{- define "feast.chart" -}} -{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} -{{- end -}} diff --git a/charts/feast/templates/core-deploy.yaml b/charts/feast/templates/core-deploy.yaml deleted file mode 100644 index 74b625de029..00000000000 --- a/charts/feast/templates/core-deploy.yaml +++ /dev/null @@ -1,138 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ template "feast.core.name" . }} - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "feast.name" . }} - component: core - chart: {{ .Chart.Name }}-{{ .Chart.Version | replace "+" "_" }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - replicas: {{ .Values.core.replicaCount }} - selector: - matchLabels: - app: {{ template "feast.name" . }} - component: core - release: {{ .Release.Name }} - strategy: - type: RollingUpdate - rollingUpdate: - maxSurge: {{ .Values.core.rollingUpdate.maxSurge }} - maxUnavailable: {{ .Values.core.rollingUpdate.maxUnavailable }} - template: - metadata: - labels: - app: {{ template "feast.name" . }} - component: core - release: {{ .Release.Name }} - spec: - containers: - - name: {{ template "feast.core.name" . 
}} - image: "{{ .Values.core.image.registry }}/{{ .Values.core.image.repository }}:{{ .Values.core.image.tag }}" - imagePullPolicy: {{ .Values.core.image.pullPolicy }} - ports: - - containerPort: {{ .Values.core.service.http.targetPort }} - name: "http" - - containerPort: {{ .Values.core.service.grpc.targetPort }} - name: "grpc" - livenessProbe: - httpGet: - path: /ping - port: {{ .Values.core.service.http.targetPort }} - scheme: HTTP - initialDelaySeconds: {{ .Values.core.livenessProbe.initialDelaySeconds }} - periodSeconds: 10 - successThreshold: 1 - timeoutSeconds: 5 - failureThreshold: {{ .Values.core.livenessProbe.failureThreshold }} - readinessProbe: - httpGet: - path: /healthz - port: {{ .Values.core.service.http.targetPort }} - scheme: HTTP - initialDelaySeconds: {{ .Values.core.readinessProbe.initialDelaySeconds }} - periodSeconds: 5 - successThreshold: 1 - timeoutSeconds: 3 - failureThreshold: {{ .Values.core.readinessProbe.failureThreshold }} - resources: - requests: - cpu: {{ .Values.core.resources.requests.cpu }} - memory: {{ .Values.core.resources.requests.memory }} - limits: - cpu: {{ .Values.core.resources.limits.cpu }} - memory: {{ .Values.core.resources.limits.memory }} - {{- if .Values.serviceAccount }} - volumeMounts: - - name: "{{ .Values.serviceAccount.name }}" - mountPath: "/etc/gcloud/service-accounts" - readOnly: true - {{- end }} - env: - - name: GRPC_PORT - value: "{{ .Values.core.service.grpc.targetPort }}" - - name: DB_HOST - value: {{ printf "%s.%s.svc.cluster.local" (include "postgresql.fullname" .) .Release.Namespace }} - - name: DB_PORT - value: "{{ .Values.postgresql.service.port }}" - - name: DB_PASSWORD - valueFrom: - secretKeyRef: - name: {{ template "postgresql.secretName" . }} - key: postgresql-password - - name: LOG_TYPE - value: {{ .Values.core.logType }} - - name: PROJECT_ID - value: "{{ .Values.core.projectId}}" - - name: TRAINING_DATASET_PREFIX - value: "{{ .Values.core.trainingDatasetPrefix }}" - - name: JOB_RUNNER - value: {{ .Values.core.jobs.runner }} - - name: JOB_WORKSPACE - value: {{ .Values.core.jobs.workspace }} - - name: JOB_OPTIONS - value: {{ .Values.core.jobs.options | toJson}} - - name: JOB_MONITOR_PERIOD_MS - value: "{{ .Values.core.jobs.monitoring.period }}" - - name: JOB_MONITOR_INITIAL_DELAY_MS - value: "{{ .Values.core.jobs.monitoring.initialDelay }}" - {{- if .Values.store }} - {{- if .Values.store.serving }} - - name: STORE_SERVING_TYPE - value: {{ .Values.store.serving.type }} - - name: STORE_SERVING_OPTIONS - value: {{ .Values.store.serving.options | toJson}} - {{- end }} - {{- if .Values.store.warehouse }} - - name: STORE_WAREHOUSE_TYPE - value: {{ .Values.store.warehouse.type }} - - name: STORE_WAREHOUSE_OPTIONS - value: {{ .Values.store.warehouse.options | toJson}} - {{- end }} - {{- if .Values.store.errors }} - - name: STORE_ERRORS_TYPE - value: {{ .Values.store.errors.type }} - - name: STORE_ERRORS_OPTIONS - value: {{ .Values.store.errors.options | toJson}} - {{- end }} - {{- end }} - - name: STATSD_HOST - value: {{ .Values.statsd.host }} - - name: STATSD_PORT - value: "{{ .Values.statsd.port }}" - {{- if .Values.dataflow }} - - name: DATAFLOW_PROJECT_ID - value: {{ .Values.dataflow.projectID }} - - name: DATAFLOW_LOCATION - value: {{ .Values.dataflow.location }} - {{- end }} - {{- if .Values.serviceAccount }} - - name: GOOGLE_APPLICATION_CREDENTIALS - value: /etc/gcloud/service-accounts/{{ .Values.serviceAccount.key }} - volumes: - - name: "{{ .Values.serviceAccount.name }}" - secret: - secretName: "{{ 
.Values.serviceAccount.name }}" - {{- end }} \ No newline at end of file diff --git a/charts/feast/templates/core-service.yaml b/charts/feast/templates/core-service.yaml deleted file mode 100644 index baad3fd103f..00000000000 --- a/charts/feast/templates/core-service.yaml +++ /dev/null @@ -1,34 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: {{ template "feast.core.name" . }} - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "feast.name" . }} - chart: {{ .Chart.Name }}-{{ .Chart.Version | replace "+" "_" }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} - {{- with .Values.core.service.annotations }} - annotations: -{{ toYaml . | indent 4 }} - {{- end }} -spec: - type: {{ .Values.core.service.type }} - {{- if .Values.core.service.extIPAdr }} - loadBalancerIP: {{ .Values.core.service.extIPAdr }} - {{- end }} - {{- if .Values.core.service.loadBalancerSourceRanges }} - loadBalancerSourceRanges: -{{ toYaml .Values.core.service.loadBalancerSourceRanges | indent 2 }} - {{- end }} - ports: - - name: http - port: {{ .Values.core.service.http.port }} - targetPort: {{ .Values.core.service.http.targetPort }} - - name: grpc - port: {{ .Values.core.service.grpc.port }} - targetPort: {{ .Values.core.service.grpc.targetPort }} - selector: - app: {{ template "feast.name" . }} - component: core - release: {{ .Release.Name }} diff --git a/charts/feast/templates/serving-deploy.yaml b/charts/feast/templates/serving-deploy.yaml deleted file mode 100644 index b84aca8c716..00000000000 --- a/charts/feast/templates/serving-deploy.yaml +++ /dev/null @@ -1,106 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ template "feast.serving.name" . }} - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "feast.name" . }} - component: serving - chart: {{ .Chart.Name }}-{{ .Chart.Version | replace "+" "_" }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - replicas: {{ .Values.serving.replicaCount }} - selector: - matchLabels: - app: {{ template "feast.name" . }} - component: serving - release: {{ .Release.Name }} - strategy: - type: RollingUpdate - rollingUpdate: - maxSurge: {{ .Values.serving.rollingUpdate.maxSurge }} - maxUnavailable: {{ .Values.serving.rollingUpdate.maxUnavailable }} - template: - metadata: - creationTimestamp: null - labels: - app: {{ template "feast.name" . }} - component: serving - release: {{ .Release.Name }} - spec: - containers: - - name: {{ template "feast.serving.name" . 
}} - image: "{{ .Values.serving.image.registry }}/{{ .Values.serving.image.repository }}:{{ .Values.serving.image.tag }}" - imagePullPolicy: {{ .Values.serving.image.pullPolicy }} - ports: - - containerPort: {{ .Values.serving.service.grpc.targetPort }} - name: grpc - - containerPort: {{ .Values.serving.service.http.targetPort }} - name: http - livenessProbe: - httpGet: - path: /ping - port: {{ .Values.serving.service.http.targetPort }} - scheme: HTTP - initialDelaySeconds: {{ .Values.serving.livenessProbe.initialDelaySeconds }} - periodSeconds: 10 - successThreshold: 1 - timeoutSeconds: 5 - failureThreshold: {{ .Values.serving.livenessProbe.failureThreshold }} - readinessProbe: - httpGet: - path: /healthz - port: {{ .Values.serving.service.http.targetPort }} - scheme: HTTP - initialDelaySeconds: {{ .Values.serving.readinessProbe.initialDelaySeconds }} - periodSeconds: 5 - successThreshold: 1 - timeoutSeconds: 3 - failureThreshold: {{ .Values.serving.readinessProbe.failureThreshold }} - resources: - requests: - cpu: "{{ .Values.serving.resources.requests.cpu }}" - memory: "{{ .Values.serving.resources.requests.memory }}" - limits: - cpu: "{{ .Values.serving.resources.limits.cpu }}" - memory: "{{ .Values.serving.resources.limits.memory }}" - env: - - name: FEAST_SERVING_HTTP_PORT - value: "{{ .Values.serving.service.http.targetPort }}" - - name: FEAST_SERVING_GRPC_PORT - value: "{{ .Values.serving.service.grpc.targetPort }}" - - name: FEAST_CORE_HOST - value: "{{ printf "%s.%s.svc.cluster.local" (include "feast.core.name" .) .Release.Namespace }}" - - name: FEAST_CORE_GRPC_PORT - value: "{{ .Values.core.service.grpc.port }}" - - name: STORE_SERVING_TYPE - value: {{ .Values.store.serving.type }} - - name: STORE_SERVING_OPTIONS - value: {{ .Values.store.serving.options | toJson }} - - name: FEAST_MAX_NB_THREAD - value: "{{ .Values.serving.config.maxNumberOfThread }}" - - name: FEAST_MAX_ENTITY_PER_BATCH - value: "{{ .Values.serving.config.maxEntityPerBatch }}" - - name: FEAST_RETRIEVAL_TIMEOUT - value: "{{ .Values.serving.config.timeout }}" - - name: FEAST_REDIS_POOL_MAX_SIZE - value: "{{ .Values.serving.config.redisPool.maxSize }}" - - name: FEAST_REDIS_POOL_MAX_IDLE - value: "{{ .Values.serving.config.redisPool.maxIdle }}" - - name: STATSD_HOST - value: "{{ .Values.statsd.host }}" - - name: STATSD_PORT - value: "{{ .Values.statsd.port }}" - {{- if .Values.serving.jaeger.enabled }} - - name: JAEGER_ENABLED - value: "{{ .Values.serving.jaeger.enabled }}" - - name: JAEGER_AGENT_HOST - value: "{{ .Values.serving.jaeger.host }}" - - name: JAEGER_AGENT_PORT - value: "{{ .Values.serving.jaeger.port }}" - - name: JAEGER_SAMPLER_TYPE - value: "{{ .Values.serving.jaeger.options.samplerType }}" - - name: JAEGER_SAMPLER_PARAM - value: "{{ .Values.serving.jaeger.options.samplerParam }}" - {{- end }} diff --git a/charts/feast/templates/serving-service.yaml b/charts/feast/templates/serving-service.yaml deleted file mode 100644 index 90a7cbd948e..00000000000 --- a/charts/feast/templates/serving-service.yaml +++ /dev/null @@ -1,34 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: {{ template "feast.serving.name" . }} - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "feast.name" . }} - chart: {{ .Chart.Name }}-{{ .Chart.Version | replace "+" "_" }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} - {{- with .Values.serving.service.annotations }} - annotations: -{{ toYaml . 
| indent 4 }} - {{- end }} -spec: - type: {{ .Values.serving.service.type }} - {{- if .Values.serving.service.extIPAdr }} - loadBalancerIP: {{ .Values.serving.service.extIPAdr }} - {{- end }} - {{- if .Values.serving.service.loadBalancerSourceRanges }} - loadBalancerSourceRanges: -{{ toYaml .Values.serving.service.loadBalancerSourceRanges | indent 2 }} - {{- end }} - ports: - - name: grpc - port: {{ .Values.serving.service.grpc.port }} - targetPort: {{ .Values.serving.service.grpc.targetPort }} - - name: http - port: {{ .Values.serving.service.http.port }} - targetPort: {{ .Values.serving.service.http.targetPort }} - selector: - app: {{ template "feast.name" . }} - component: serving - release: {{ .Release.Name }} diff --git a/charts/feast/values.yaml b/charts/feast/values.yaml deleted file mode 100644 index 82ed93c9778..00000000000 --- a/charts/feast/values.yaml +++ /dev/null @@ -1,127 +0,0 @@ ---- -core: - projectId: "gcp-project-id" - image: - pullPolicy: IfNotPresent - registry: gcr.io/kf-feast - repository: feast-core - tag: "0.1.1" - replicaCount: 1 - resources: - limits: - cpu: 4 - memory: 6G - requests: - cpu: 1 - memory: 2G - rollingUpdate: - maxSurge: 2 - maxUnavailable: 0 - service: - type: ClusterIP - grpc: - port: 6565 - targetPort: 6565 - http: - port: 80 - targetPort: 8080 - # loadBalancerSourceRanges sets the accepted IP ranges for firewall ingress rule - # this firewall rule is usually created when the service type is "LoadBalancer" - # loadBalancerSourceRanges: ["10.0.0.0/8"] - jobs: - workspace: "/tmp" - # runner specifies the Beam pipeline runner, use either DirectRunner (for development) or DataflowRunner (for production) - runner: DirectRunner - options: "{}" - errorStoreType: "stdout" - errorStoreOptions: "{}" - monitoring: - period: 5000 - initialDelay: 60000 - trainingDatasetPrefix: "fs" - # logType: JSON - livenessProbe: - initialDelaySeconds: 120 - failureThreshold: 3 - readinessProbe: - initialDelaySeconds: 60 - failureThreshold: 1 - -# dataflow configuration is required when core.jobs.runner=DataflowRunner -# dataflow: -# projectID: ${GCP_PROJECT} -# location: ${GCP_REGION} - -store: - errors: - type: "stdout" - warehouse: - type: "bigquery" - # options: '{"project": "gcp-project-id", "dataset": "feast"}' - serving: - type: "redis" - # options: '{"host": "redis-master", "port": "6379"}' - -postgresql: - provision: true - persistence: - enabled: true - -redis: - provision: false - cluster: - enabled: false - -serving: - config: - maxEntityPerBatch: 2000 - maxNumberOfThread: 128 - redisPool: - maxIdle: 16 - maxSize: 128 - timeout: 5 - image: - pullPolicy: IfNotPresent - registry: gcr.io/kf-feast - repository: feast-serving - tag: "0.1.1" - replicaCount: 1 - resources: - limits: - cpu: 2 - memory: 4G - requests: - cpu: 1 - memory: 1G - rollingUpdate: - maxSurge: 2 - maxUnavailable: 0 - service: - type: ClusterIP - grpc: - port: 6565 - targetPort: 6565 - http: - port: 80 - targetPort: 8080 - # loadBalancerSourceRanges sets the accepted IP ranges for firewall ingress rule - # this firewall rule is usually created when the service type is "LoadBalancer" - # loadBalancerSourceRanges: ["10.0.0.0/8"] - jaeger: - enabled: false - livenessProbe: - initialDelaySeconds: 120 - failureThreshold: 3 - readinessProbe: - initialDelaySeconds: 30 - failureThreshold: 1 - -# Enable only if you have an existing service account you -# want to mount the secret of. 
-# serviceAccount: -# name: feast-service-account -# key: service-account.json - -statsd: - host: "localhost" - port: 8125 diff --git a/cli/.gitignore b/cli/.gitignore deleted file mode 100644 index 5d0a73c7c6b..00000000000 --- a/cli/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -feast/yamls -feast/feast diff --git a/cli/Makefile b/cli/Makefile deleted file mode 100644 index 91cb05a39ab..00000000000 --- a/cli/Makefile +++ /dev/null @@ -1,23 +0,0 @@ -.PHONY: go -VERSION_FILE=../VERSION -FEAST_VERSION=`cat $(VERSION_FILE)` - -build-all: - @$(MAKE) cli-linux cli-darwin - -.PHONY: cli-linux -cli-linux: - mkdir -p bin - export GOOS=linux; \ - export GOARCH=amd64; \ - mkdir -p bin/$$GOOS-$$GOARCH && go build \ - -ldflags "-X main.Version=$(FEAST_VERSION)" \ - -o bin/$$GOOS-$$GOARCH/feast feast/main.go - -.PHONY: cli-darwin -cli-darwin: - export GOOS=darwin; \ - export GOARCH=amd64; \ - mkdir -p bin/$$GOOS-$$GOARCH && go build \ - -ldflags "-X main.Version=$(FEAST_VERSION)" \ - -o bin/$$GOOS-$$GOARCH/feast feast/main.go \ No newline at end of file diff --git a/cli/README.md b/cli/README.md deleted file mode 100644 index 19ff0753e22..00000000000 --- a/cli/README.md +++ /dev/null @@ -1,33 +0,0 @@ -# Feast CLI - -The feast command-line tool, `feast`, is used to register resources to -feast, as well as manage and run ingestion jobs. - -## Installation - -The quickest way to get the CLI is to download the compiled binary: - -```sh -# For Mac OS users -wget https://github.com/gojek/feast/releases/download/v0.1.1/feast-cli-v0.1.1-darwin-amd64 -chmod +x feast-cli-v0.1.1-darwin-amd64 -sudo mv feast-cli-v0.1.1-darwin-amd64 /usr/local/bin/feast - -# For Linux users -wget https://github.com/gojek/feast/releases/download/v0.1.1/feast-cli-v0.1.1-linux-amd64 -chmod +x feast-cli-v0.1.1-linux-amd64 -sudo mv feast-cli-v0.1.1-linux-amd64 /usr/local/bin/feast -``` - -### Building from source - -If you want to develop the CLI or build it from source, you need to have at least Golang version 1.11 installed because Feast use go modules. - -```sh -git clone https://github.com/gojek/feast -cd feast -go build -o feast ./cli/feast - -# Test running feast CLI -./feast -``` diff --git a/cli/feast/cmd/apply.go b/cli/feast/cmd/apply.go deleted file mode 100644 index 1fa61c2d5e8..00000000000 --- a/cli/feast/cmd/apply.go +++ /dev/null @@ -1,129 +0,0 @@ -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package cmd - -import ( - "context" - "errors" - "fmt" - "io/ioutil" - "path/filepath" - - "github.com/gojek/feast/cli/feast/pkg/parse" - "github.com/gojek/feast/protos/generated/go/feast/core" - - "github.com/spf13/cobra" -) - -// applyCmd represents the apply command -var applyCmd = &cobra.Command{ - Use: "apply [resource] [filepaths...]", - Short: "Apply a resource given one or many yaml files.", - Long: `Apply a resource from one or multiple yamls. 
- -Valid resources include: -- entity -- feature -- featureGroup - -Examples: -- feast apply entity entity.yml -- feast apply feature *-feature.yml`, - RunE: func(cmd *cobra.Command, args []string) error { - if len(args) == 0 { - return cmd.Help() - } - - if len(args) < 2 { - fmt.Println(args) - return errors.New("invalid number of arguments for apply command") - } - - initConn() - ctx := context.Background() - coreCli := core.NewCoreServiceClient(coreConn) - resource := args[0] - paths := args[1:] - - for _, fp := range paths { - if isYaml(fp) { - fmt.Printf("Applying %s at %s\n", resource, fp) - regID, err := apply(ctx, coreCli, resource, fp) - if err != nil { - return fmt.Errorf("failed to apply %s at path %s: %v", resource, fp, err) - } - fmt.Printf("Successfully applied %s %s\n", resource, regID) - } - } - return nil - }, -} - -func init() { - rootCmd.AddCommand(applyCmd) -} - -func apply(ctx context.Context, coreCli core.CoreServiceClient, resource string, fileLocation string) (string, error) { - yml, err := ioutil.ReadFile(fileLocation) - if err != nil { - return "", fmt.Errorf("error reading file at %s: %v", fileLocation, err) - } - - switch resource { - case "feature": - return applyFeature(ctx, coreCli, yml) - case "featureGroup": - return applyFeatureGroup(ctx, coreCli, yml) - case "entity": - return applyEntity(ctx, coreCli, yml) - default: - return "", fmt.Errorf("invalid resource %s: please choose one of [feature, featureGroup, entity, storage]", resource) - } -} - -func applyFeature(ctx context.Context, coreCli core.CoreServiceClient, yml []byte) (string, error) { - fs, err := parse.YamlToFeatureSpec(yml) - if err != nil { - return "", err - } - _, err = coreCli.ApplyFeature(ctx, fs) - return fs.GetId(), err -} - -func applyFeatureGroup(ctx context.Context, coreCli core.CoreServiceClient, yml []byte) (string, error) { - fgs, err := parse.YamlToFeatureGroupSpec(yml) - if err != nil { - return "", err - } - _, err = coreCli.ApplyFeatureGroup(ctx, fgs) - return fgs.GetId(), err -} - -func applyEntity(ctx context.Context, coreCli core.CoreServiceClient, yml []byte) (string, error) { - es, err := parse.YamlToEntitySpec(yml) - if err != nil { - return "", err - } - _, err = coreCli.ApplyEntity(ctx, es) - return es.GetName(), err -} - -func isYaml(path string) bool { - ext := filepath.Ext(path) - if ext == ".yaml" || ext == ".yml" { - return true - } - return false -} diff --git a/cli/feast/cmd/apply_test.go b/cli/feast/cmd/apply_test.go deleted file mode 100644 index df919065e2e..00000000000 --- a/cli/feast/cmd/apply_test.go +++ /dev/null @@ -1,135 +0,0 @@ -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
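A note on the apply flow removed above: stripped of the cobra wiring, `feast apply entity` reduces to one parse helper plus one CoreServiceClient call over gRPC. A minimal standalone sketch of that same flow — the core address and the `entity.yml` path are hypothetical here; the real CLI reads `coreURI` from `~/.feast`:

```go
package main

import (
	"context"
	"io/ioutil"
	"log"

	"github.com/gojek/feast/cli/feast/pkg/parse"
	"github.com/gojek/feast/protos/generated/go/feast/core"
	"google.golang.org/grpc"
)

func main() {
	// Hypothetical core address; the CLI normally takes this from its config file.
	conn, err := grpc.Dial("localhost:6565", grpc.WithInsecure())
	if err != nil {
		log.Fatalf("unable to connect to core: %v", err)
	}
	defer conn.Close()

	// Same steps applyEntity performs: read the yaml, parse it into an
	// EntitySpec, then register it through the core service.
	yml, err := ioutil.ReadFile("entity.yml") // hypothetical path
	if err != nil {
		log.Fatalf("reading entity yaml: %v", err)
	}
	es, err := parse.YamlToEntitySpec(yml)
	if err != nil {
		log.Fatalf("parsing entity yaml: %v", err)
	}
	if _, err := core.NewCoreServiceClient(conn).ApplyEntity(context.Background(), es); err != nil {
		log.Fatalf("applying entity: %v", err)
	}
	log.Printf("applied entity %s", es.GetName())
}
```

The cobra command above adds the multi-file loop and the `isYaml` extension filter on top of this.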
- -package cmd - -import ( - "context" - "testing" - - "github.com/gojek/feast/cli/feast/cmd/mock" - "github.com/golang/mock/gomock" - - "github.com/gojek/feast/protos/generated/go/feast/core" -) - -func Test_apply(t *testing.T) { - mockCore := mock.NewMockCoreServiceClient(gomock.NewController(t)) - mockCore.EXPECT().ApplyEntity(gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes() - mockCore.EXPECT().ApplyFeature(gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes() - mockCore.EXPECT().ApplyFeatureGroup(gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes() - - type args struct { - ctx context.Context - coreCli core.CoreServiceClient - resource string - fileLocation string - } - tests := []struct { - name string - args args - want string - wantErr bool - }{ - { - name: "test apply invalid resource", - args: args{ - ctx: context.Background(), - coreCli: mockCore, - resource: "invalidResource", - fileLocation: "testdata/valid_entity.yaml", - }, - want: "", - wantErr: true, - }, - { - name: "test apply entity", - args: args{ - ctx: context.Background(), - coreCli: mockCore, - resource: "entity", - fileLocation: "testdata/valid_entity.yaml", - }, - want: "myentity", - wantErr: false, - }, - { - name: "test apply entity with non-existent file", - args: args{ - ctx: context.Background(), - coreCli: mockCore, - resource: "entity", - fileLocation: "testdata/file_not_exists.yaml", - }, - want: "", - wantErr: true, - }, - { - name: "test apply entity with no tag", - args: args{ - ctx: context.Background(), - coreCli: mockCore, - resource: "entity", - fileLocation: "testdata/valid_entity_no_tag.yaml", - }, - want: "myentity", - wantErr: false, - }, - { - name: "test apply invalid syntax in entity yaml", - args: args{ - ctx: context.Background(), - coreCli: mockCore, - resource: "entity", - fileLocation: "testdata/invalid_entity.yaml", - }, - want: "", - wantErr: true, - }, - { - name: "test apply feature", - args: args{ - ctx: context.Background(), - coreCli: mockCore, - resource: "feature", - fileLocation: "testdata/valid_feature.yaml", - }, - want: "myentity.feature_bool_redis1", - wantErr: false, - }, - { - name: "test apply feature group", - args: args{ - ctx: context.Background(), - coreCli: mockCore, - resource: "featureGroup", - fileLocation: "testdata/valid_feature_group.yaml", - }, - want: "my_fg", - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := apply(tt.args.ctx, tt.args.coreCli, tt.args.resource, tt.args.fileLocation) - if (err != nil) != tt.wantErr { - t.Errorf("apply() error = %v, wantErr %v", err, tt.wantErr) - return - } - if got != tt.want { - t.Errorf("apply() = %v, want %v", got, tt.want) - } - }) - } -} diff --git a/cli/feast/cmd/common.go b/cli/feast/cmd/common.go deleted file mode 100644 index 81a52bcd057..00000000000 --- a/cli/feast/cmd/common.go +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
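The table test above drives the generated `MockCoreServiceClient` with `AnyTimes()` expectations, which never fail on call count. For comparison, gomock can also enforce strict counts through the controller; a small sketch in the same package, reusing the existing testdata (the test name is hypothetical):

```go
package cmd

import (
	"context"
	"testing"

	"github.com/gojek/feast/cli/feast/cmd/mock"
	"github.com/golang/mock/gomock"
)

func TestApplyEntityOnce(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish() // fails the test if an expected call never happened

	mockCore := mock.NewMockCoreServiceClient(ctrl)
	// Expect exactly one ApplyEntity call for this scenario.
	mockCore.EXPECT().
		ApplyEntity(gomock.Any(), gomock.Any()).
		Return(nil, nil).
		Times(1)

	if _, err := apply(context.Background(), mockCore, "entity", "testdata/valid_entity.yaml"); err != nil {
		t.Errorf("apply() returned error: %v", err)
	}
}
```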
- -package cmd - -import ( - "fmt" - - "google.golang.org/grpc" -) - -// global variables -var ( - cfg *cliCfg - coreConn *grpc.ClientConn -) - -func initConn() { - var err error - coreConn, err = grpc.Dial(cfg.CoreURI, grpc.WithInsecure()) - if err != nil { - handleErr(fmt.Errorf("unable to connect to core service at %s: %v", cfg.CoreURI, err)) - } -} - -func closeConn() { - if coreConn != nil { - coreConn.Close() - } -} diff --git a/cli/feast/cmd/config.go b/cli/feast/cmd/config.go deleted file mode 100644 index be17266bd92..00000000000 --- a/cli/feast/cmd/config.go +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package cmd - -import ( - "errors" - "fmt" - "io/ioutil" - "os" - "path" - - "gopkg.in/yaml.v2" - - homedir "github.com/mitchellh/go-homedir" - "github.com/spf13/cobra" - "github.com/spf13/viper" -) - -var cfgFile string - -type cliCfg struct { - CoreURI string `yaml:"coreURI"` -} - -// configCmd represents the config command -var configCmd = &cobra.Command{ - Use: "config", - Short: "config utils for feast", -} - -var configSetCmd = &cobra.Command{ - Use: "set [key] [value]", - Short: "set configuration for the feast cli", - Long: `set configuration for a given key to the provided value. - -Available keys: - - coreURI: host:port uri to connect to feast core via grpc - -Example: - feast config set coreURI localhost:8433`, - RunE: func(cmd *cobra.Command, args []string) error { - if len(args) == 0 { - return cmd.Help() - } - if len(args) > 2 { - return fmt.Errorf("unable to set config value: %s", "too many arguments provided") - } - err := setConfig(args[0], args[1]) - if err != nil { - return fmt.Errorf("unable to set config value: %s", err) - } - return nil - }, -} - -var configListCmd = &cobra.Command{ - Use: "list", - Short: "list current configuration", - RunE: func(cmd *cobra.Command, args []string) error { - fmt.Printf(`[Current configuration] -coreURI: %s`+"\n", cfg.CoreURI) - return nil - }, -} - -func init() { - configCmd.AddCommand(configSetCmd) - configCmd.AddCommand(configListCmd) - rootCmd.AddCommand(configCmd) -} - -func initConfig() { - home, err := homedir.Dir() - handleErr(err) - - cfgFile = path.Join(home, ".feast") - viper.SetConfigType("yaml") - viper.SetConfigFile(cfgFile) - if _, err := os.Stat(cfgFile); os.IsNotExist(err) { - fmt.Printf("unable to locate configuration at %s. Creating configuration file...\n", - cfgFile) - f, err := os.OpenFile(cfgFile, os.O_RDWR|os.O_CREATE, 0666) - handleErr(err) - f.Write([]byte("coreURI: \"\"")) - f.Close() - } - cfg = &cliCfg{} - viper.AutomaticEnv() // read in environment variables that match - if err := viper.ReadInConfig(); err == nil { - cfg.CoreURI = viper.GetString("coreURI") - } else { - handleErr(err) - } -} - -func setConfig(key string, value string) error { - switch key { - case "coreURI": - cfg.CoreURI = value - fmt.Printf("[config] coreURI set to %s.\n", value) - default: - return errors.New("invalid key provided. 
Available keys: [coreURI]") - } - d, err := yaml.Marshal(cfg) - if err != nil { - return fmt.Errorf("error writing to config file: %v", err) - } - err = ioutil.WriteFile(cfgFile, d, 0644) - return err -} diff --git a/cli/feast/cmd/config_test.go b/cli/feast/cmd/config_test.go deleted file mode 100644 index 5c6010c4e18..00000000000 --- a/cli/feast/cmd/config_test.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package cmd - -import ( - "github.com/mitchellh/go-homedir" - "os" - "testing" -) - -func Test_initConfig(t *testing.T) { - tests := []struct { - name string - }{ - {"test creation of default config at $HOME/.feast"}, - } - - defaultFeastConfigFile, _ := homedir.Expand("~/.feast") - _ = os.Remove(defaultFeastConfigFile) - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - initConfig() - _, err := os.Stat(defaultFeastConfigFile) - if os.IsNotExist(err) { - t.Errorf("initConfig should create config file at " + defaultFeastConfigFile) - } - }) - } -} diff --git a/cli/feast/cmd/get.go b/cli/feast/cmd/get.go deleted file mode 100644 index 81dd2cb54dd..00000000000 --- a/cli/feast/cmd/get.go +++ /dev/null @@ -1,100 +0,0 @@ -package cmd - -import ( - "context" - "errors" - "fmt" - - "github.com/gojek/feast/cli/feast/pkg/printer" - "github.com/gojek/feast/protos/generated/go/feast/core" - "github.com/spf13/cobra" -) - -// listCmd represents the list command -var getCmd = &cobra.Command{ - Use: "get [resource] [id]", - Short: "Get and print the details of the desired resource.", - Long: `Get and print the details of the desired resource. 
- -Valid resources include: -- entity -- feature -- job - -Examples: -- feast get entity myentity`, - RunE: func(cmd *cobra.Command, args []string) error { - if len(args) == 0 { - return cmd.Help() - } - - if len(args) != 2 { - return errors.New("invalid number of arguments for list command") - } - - initConn() - err := get(args[0], args[1]) - if err != nil { - return fmt.Errorf("failed to list %s: %v", args[0], err) - } - return nil - }, -} - -func init() { - rootCmd.AddCommand(getCmd) -} - -func get(resource string, id string) error { - ctx := context.Background() - - switch resource { - case "feature": - return getFeature(ctx, core.NewUIServiceClient(coreConn), id) - case "entity": - return getEntity(ctx, core.NewUIServiceClient(coreConn), id) - case "storage": - return getStorage(ctx, core.NewUIServiceClient(coreConn), id) - case "job": - return getJob(ctx, core.NewJobServiceClient(coreConn), id) - default: - return fmt.Errorf("invalid resource %s: please choose one of [features, entities, storage, jobs]", resource) - } -} - -func getFeature(ctx context.Context, cli core.UIServiceClient, id string) error { - response, err := cli.GetFeature(ctx, &core.UIServiceTypes_GetFeatureRequest{Id: id}) - if err != nil { - return err - } - printer.PrintFeatureDetail(response.GetFeature()) - return nil -} - -func getEntity(ctx context.Context, cli core.UIServiceClient, id string) error { - response, err := cli.GetEntity(ctx, &core.UIServiceTypes_GetEntityRequest{Id: id}) - if err != nil { - return err - } - printer.PrintEntityDetail(response.GetEntity()) - return nil -} - -// This function is deprecated, and may be removed in subsequent versions. -func getStorage(ctx context.Context, cli core.UIServiceClient, id string) error { - response, err := cli.GetStorage(ctx, &core.UIServiceTypes_GetStorageRequest{Id: id}) - if err != nil { - return err - } - printer.PrintStorageDetail(response.GetStorage()) - return nil -} - -func getJob(ctx context.Context, cli core.JobServiceClient, id string) error { - response, err := cli.GetJob(ctx, &core.JobServiceTypes_GetJobRequest{Id: id}) - if err != nil { - return err - } - printer.PrintJobDetail(response.GetJob()) - return nil -} diff --git a/cli/feast/cmd/jobs.go b/cli/feast/cmd/jobs.go deleted file mode 100644 index 8f44d9593ed..00000000000 --- a/cli/feast/cmd/jobs.go +++ /dev/null @@ -1,136 +0,0 @@ -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package cmd - -import ( - "context" - "errors" - "fmt" - "io/ioutil" - "time" - - "github.com/gojek/feast/cli/feast/pkg/parse" - "github.com/gojek/feast/protos/generated/go/feast/core" - - "github.com/spf13/cobra" -) - -var ( - waitJobComplete = false - jobName = "feastimport" -) - -// jobsCmd represents the jobs command -var jobsCmd = &cobra.Command{ - Use: "jobs", - Short: "Jobs utilities for feast", -} - -var jobsRunCmd = &cobra.Command{ - Use: "run [filepath]", - Short: "Submit a job to the jobservice and run it", - RunE: func(cmd *cobra.Command, args []string) error { - if len(args) == 0 { - return cmd.Help() - } - if len(args) > 1 { - return errors.New("invalid number of arguments for jobs run command") - } - ctx := context.Background() - return runJob(ctx, args[0]) - }, -} - -var jobsAbortCmd = &cobra.Command{ - Use: "stop [job_id]", - Short: "Stop the given job", - RunE: func(cmd *cobra.Command, args []string) error { - if len(args) == 0 { - return cmd.Help() - } - if len(args) > 1 { - return errors.New("invalid number of arguments for jobs stop command") - } - ctx := context.Background() - return abortJob(ctx, args[0]) - }, -} - -func init() { - jobsRunCmd.Flags().BoolVar(&waitJobComplete, "wait", false, "wait for job to run to completion") - jobsRunCmd.Flags().StringVar(&jobName, "name", "feastimport", "job name to be submitted") - jobsCmd.AddCommand(jobsRunCmd) - jobsCmd.AddCommand(jobsAbortCmd) - rootCmd.AddCommand(jobsCmd) -} - -func runJob(ctx context.Context, path string) error { - d, err := ioutil.ReadFile(path) - if err != nil { - return fmt.Errorf("[jobs] could not read file: %v", err) - } - is, err := parse.YamlToImportSpec(d) - if err != nil { - return fmt.Errorf("[jobs] unable to parse yaml file at %s: %v", path, err) - } - initConn() - jobsClient := core.NewJobServiceClient(coreConn) - out, err := jobsClient.SubmitJob(ctx, &core.JobServiceTypes_SubmitImportJobRequest{ - Name: jobName, - ImportSpec: is, - }) - if err != nil { - return fmt.Errorf("[jobs] failed to start job: %v", err) - } - fmt.Printf("[jobs] started job with ID: %s", out.GetJobId()) - if waitJobComplete { - return waitJob(ctx, jobsClient, out.GetJobId()) - } - return nil -} - -func waitJob(ctx context.Context, jobsClient core.JobServiceClient, jobID string) error { - for { - response, err := jobsClient.GetJob(ctx, &core.JobServiceTypes_GetJobRequest{ - Id: jobID, - }) - if err != nil { - return fmt.Errorf("[jobs] error while querying job id %s: %v", jobID, err) - } - - status := response.GetJob().GetStatus() - fmt.Printf("\r[jobs] job id %s is currently: %s\n", jobID, status) - switch status { - case "COMPLETED": - return nil - case "ABORTED": - return fmt.Errorf("[jobs] job id %s failed: Job was aborted", jobID) - case "ERROR": - return fmt.Errorf("[jobs] job id %s failed: Job terminated with error. 
For more information, refer to job logs", jobID) - } - time.Sleep(5 * time.Second) - } -} - -func abortJob(ctx context.Context, id string) error { - initConn() - jobsClient := core.NewJobServiceClient(coreConn) - response, err := jobsClient.AbortJob(ctx, &core.JobServiceTypes_AbortJobRequest{Id: id}) - if err != nil { - return err - } - fmt.Printf("Aborting job with id: %s\n", response.GetId()) - return nil -} diff --git a/cli/feast/cmd/list.go b/cli/feast/cmd/list.go deleted file mode 100644 index 9995305d9c8..00000000000 --- a/cli/feast/cmd/list.go +++ /dev/null @@ -1,151 +0,0 @@ -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package cmd - -import ( - "context" - "errors" - "fmt" - "os" - "strings" - "text/tabwriter" - - "github.com/gojek/feast/cli/feast/pkg/timeutil" - "github.com/gojek/feast/protos/generated/go/feast/core" - - "github.com/golang/protobuf/ptypes/empty" - "github.com/spf13/cobra" -) - -// listCmd represents the list command -var listCmd = &cobra.Command{ - Use: "list [resource]", - Short: "List the resources currently registered to feast.", - Long: `Prints a list of all of the resource type currently registered to feast. - -Valid resources include: -- entities -- features -- jobs - -Examples: -- feast list entities`, - RunE: func(cmd *cobra.Command, args []string) error { - if len(args) == 0 { - return cmd.Help() - } - - if len(args) != 1 { - return errors.New("invalid number of arguments for list command") - } - - initConn() - err := list(args[0]) - if err != nil { - return fmt.Errorf("failed to list %s: %v", args[0], err) - } - return nil - }, -} - -func init() { - rootCmd.AddCommand(listCmd) -} - -func list(resource string) error { - ctx := context.Background() - - switch resource { - case "features": - return listFeatures(ctx, core.NewCoreServiceClient(coreConn)) - case "entities": - return listEntities(ctx, core.NewCoreServiceClient(coreConn)) - case "jobs": - return listJobs(ctx, core.NewJobServiceClient(coreConn)) - case "storage": - return listStorage(ctx, core.NewCoreServiceClient(coreConn)) - default: - return fmt.Errorf("invalid resource %s: please choose one of [features, storage, entities, jobs]", resource) - } -} - -func listFeatures(ctx context.Context, cli core.CoreServiceClient) error { - response, err := cli.ListFeatures(ctx, &empty.Empty{}) - if err != nil { - return err - } - w := new(tabwriter.Writer) - w.Init(os.Stdout, 0, 8, 2, ' ', 0) - fmt.Fprintf(w, "ID\tDESCRIPTION\tOWNER\n") - fmt.Fprintf(w, "--\t-----------\t-----\n") - for _, feat := range response.GetFeatures() { - fmt.Fprintf(w, strings.Join( - []string{feat.Id, feat.Description, feat.Owner}, "\t")+"\n") - } - w.Flush() - return nil -} - -func listEntities(ctx context.Context, cli core.CoreServiceClient) error { - response, err := cli.ListEntities(ctx, &empty.Empty{}) - if err != nil { - return err - } - w := new(tabwriter.Writer) - w.Init(os.Stdout, 0, 8, 2, ' ', 0) - fmt.Fprintf(w, "ID\tDESCRIPTION\n") - fmt.Fprintf(w, 
"--\t-----------\t\n") - for _, ent := range response.GetEntities() { - fmt.Fprintf(w, strings.Join( - []string{ent.Name, ent.Description}, "\t")+"\n") - } - w.Flush() - return nil -} - -func listJobs(ctx context.Context, cli core.JobServiceClient) error { - response, err := cli.ListJobs(ctx, &empty.Empty{}) - if err != nil { - return err - } - w := new(tabwriter.Writer) - w.Init(os.Stdout, 0, 8, 2, ' ', 0) - fmt.Fprintf(w, "JOB_ID\tTYPE\tRUNNER\tSTATUS\tAGE\n") - fmt.Fprintf(w, "------\t----\t------\t------\t---\n") - for _, job := range response.GetJobs() { - fmt.Fprintf(w, strings.Join( - []string{job.Id, job.Type, job.Runner, job.Status, timeutil.DurationUntilNowInHumanFormat(*job.Created)}, "\t")+"\n") - } - w.Flush() - return nil -} - -// This function is deprecated, and may be removed in subsequent versions. -func listStorage(ctx context.Context, cli core.CoreServiceClient) error { - response, err := cli.ListStorage(ctx, &empty.Empty{}) - if err != nil { - return err - } - w := new(tabwriter.Writer) - w.Init(os.Stdout, 0, 8, 2, ' ', 0) - fmt.Fprintf(w, "ID\tTYPE\n") - fmt.Fprintf(w, "--\t----\t\n") - for _, feat := range response.GetStorageSpecs() { - fmt.Fprintf(w, strings.Join( - []string{feat.Id, feat.Type}, "\t")+"\n") - } - w.Flush() - return nil -} diff --git a/cli/feast/cmd/mock/README.md b/cli/feast/cmd/mock/README.md deleted file mode 100644 index 3036d5856ab..00000000000 --- a/cli/feast/cmd/mock/README.md +++ /dev/null @@ -1,11 +0,0 @@ -Mock GRPC client is created using this library -https://github.com/golang/mock - -An example to generate `mock_core.go`: - -``` -cd $FEAST_REPO/cli/feast -mockgen --package mock github.com/gojek/feast/protos/generated/go/feast/core CoreServiceClient > cmd/mock/core_service.go -``` - -> We need to re-run `mockgen` whenever the protos for the services are updated \ No newline at end of file diff --git a/cli/feast/cmd/mock/core_service.go b/cli/feast/cmd/mock/core_service.go deleted file mode 100644 index 5a3389300a4..00000000000 --- a/cli/feast/cmd/mock/core_service.go +++ /dev/null @@ -1,200 +0,0 @@ -// Code generated by MockGen. DO NOT EDIT. -// Source: github.com/gojek/feast/protos/generated/go/feast/core (interfaces: CoreServiceClient) - -// Package mock is a generated GoMock package. 
-package mock - -import ( - context "context" - core "github.com/gojek/feast/protos/generated/go/feast/core" - specs "github.com/gojek/feast/protos/generated/go/feast/specs" - gomock "github.com/golang/mock/gomock" - empty "github.com/golang/protobuf/ptypes/empty" - grpc "google.golang.org/grpc" - reflect "reflect" -) - -// MockCoreServiceClient is a mock of CoreServiceClient interface -type MockCoreServiceClient struct { - ctrl *gomock.Controller - recorder *MockCoreServiceClientMockRecorder -} - -// MockCoreServiceClientMockRecorder is the mock recorder for MockCoreServiceClient -type MockCoreServiceClientMockRecorder struct { - mock *MockCoreServiceClient -} - -// NewMockCoreServiceClient creates a new mock instance -func NewMockCoreServiceClient(ctrl *gomock.Controller) *MockCoreServiceClient { - mock := &MockCoreServiceClient{ctrl: ctrl} - mock.recorder = &MockCoreServiceClientMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockCoreServiceClient) EXPECT() *MockCoreServiceClientMockRecorder { - return m.recorder -} - -// ApplyEntity mocks base method -func (m *MockCoreServiceClient) ApplyEntity(arg0 context.Context, arg1 *specs.EntitySpec, arg2 ...grpc.CallOption) (*core.CoreServiceTypes_ApplyEntityResponse, error) { - varargs := []interface{}{arg0, arg1} - for _, a := range arg2 { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, "ApplyEntity", varargs...) - ret0, _ := ret[0].(*core.CoreServiceTypes_ApplyEntityResponse) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ApplyEntity indicates an expected call of ApplyEntity -func (mr *MockCoreServiceClientMockRecorder) ApplyEntity(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { - varargs := append([]interface{}{arg0, arg1}, arg2...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ApplyEntity", reflect.TypeOf((*MockCoreServiceClient)(nil).ApplyEntity), varargs...) -} - -// ApplyFeature mocks base method -func (m *MockCoreServiceClient) ApplyFeature(arg0 context.Context, arg1 *specs.FeatureSpec, arg2 ...grpc.CallOption) (*core.CoreServiceTypes_ApplyFeatureResponse, error) { - varargs := []interface{}{arg0, arg1} - for _, a := range arg2 { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, "ApplyFeature", varargs...) - ret0, _ := ret[0].(*core.CoreServiceTypes_ApplyFeatureResponse) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ApplyFeature indicates an expected call of ApplyFeature -func (mr *MockCoreServiceClientMockRecorder) ApplyFeature(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { - varargs := append([]interface{}{arg0, arg1}, arg2...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ApplyFeature", reflect.TypeOf((*MockCoreServiceClient)(nil).ApplyFeature), varargs...) -} - -// ApplyFeatureGroup mocks base method -func (m *MockCoreServiceClient) ApplyFeatureGroup(arg0 context.Context, arg1 *specs.FeatureGroupSpec, arg2 ...grpc.CallOption) (*core.CoreServiceTypes_ApplyFeatureGroupResponse, error) { - varargs := []interface{}{arg0, arg1} - for _, a := range arg2 { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, "ApplyFeatureGroup", varargs...) 
- ret0, _ := ret[0].(*core.CoreServiceTypes_ApplyFeatureGroupResponse) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ApplyFeatureGroup indicates an expected call of ApplyFeatureGroup -func (mr *MockCoreServiceClientMockRecorder) ApplyFeatureGroup(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { - varargs := append([]interface{}{arg0, arg1}, arg2...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ApplyFeatureGroup", reflect.TypeOf((*MockCoreServiceClient)(nil).ApplyFeatureGroup), varargs...) -} - -// GetEntities mocks base method -func (m *MockCoreServiceClient) GetEntities(arg0 context.Context, arg1 *core.CoreServiceTypes_GetEntitiesRequest, arg2 ...grpc.CallOption) (*core.CoreServiceTypes_GetEntitiesResponse, error) { - varargs := []interface{}{arg0, arg1} - for _, a := range arg2 { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, "GetEntities", varargs...) - ret0, _ := ret[0].(*core.CoreServiceTypes_GetEntitiesResponse) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetEntities indicates an expected call of GetEntities -func (mr *MockCoreServiceClientMockRecorder) GetEntities(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { - varargs := append([]interface{}{arg0, arg1}, arg2...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetEntities", reflect.TypeOf((*MockCoreServiceClient)(nil).GetEntities), varargs...) -} - -// GetFeatures mocks base method -func (m *MockCoreServiceClient) GetFeatures(arg0 context.Context, arg1 *core.CoreServiceTypes_GetFeaturesRequest, arg2 ...grpc.CallOption) (*core.CoreServiceTypes_GetFeaturesResponse, error) { - varargs := []interface{}{arg0, arg1} - for _, a := range arg2 { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, "GetFeatures", varargs...) - ret0, _ := ret[0].(*core.CoreServiceTypes_GetFeaturesResponse) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetFeatures indicates an expected call of GetFeatures -func (mr *MockCoreServiceClientMockRecorder) GetFeatures(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { - varargs := append([]interface{}{arg0, arg1}, arg2...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetFeatures", reflect.TypeOf((*MockCoreServiceClient)(nil).GetFeatures), varargs...) -} - -// GetStorage mocks base method -func (m *MockCoreServiceClient) GetStorage(arg0 context.Context, arg1 *core.CoreServiceTypes_GetStorageRequest, arg2 ...grpc.CallOption) (*core.CoreServiceTypes_GetStorageResponse, error) { - varargs := []interface{}{arg0, arg1} - for _, a := range arg2 { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, "GetStorage", varargs...) - ret0, _ := ret[0].(*core.CoreServiceTypes_GetStorageResponse) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetStorage indicates an expected call of GetStorage -func (mr *MockCoreServiceClientMockRecorder) GetStorage(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { - varargs := append([]interface{}{arg0, arg1}, arg2...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetStorage", reflect.TypeOf((*MockCoreServiceClient)(nil).GetStorage), varargs...) -} - -// ListEntities mocks base method -func (m *MockCoreServiceClient) ListEntities(arg0 context.Context, arg1 *empty.Empty, arg2 ...grpc.CallOption) (*core.CoreServiceTypes_ListEntitiesResponse, error) { - varargs := []interface{}{arg0, arg1} - for _, a := range arg2 { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, "ListEntities", varargs...) 
- ret0, _ := ret[0].(*core.CoreServiceTypes_ListEntitiesResponse) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListEntities indicates an expected call of ListEntities -func (mr *MockCoreServiceClientMockRecorder) ListEntities(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { - varargs := append([]interface{}{arg0, arg1}, arg2...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListEntities", reflect.TypeOf((*MockCoreServiceClient)(nil).ListEntities), varargs...) -} - -// ListFeatures mocks base method -func (m *MockCoreServiceClient) ListFeatures(arg0 context.Context, arg1 *empty.Empty, arg2 ...grpc.CallOption) (*core.CoreServiceTypes_ListFeaturesResponse, error) { - varargs := []interface{}{arg0, arg1} - for _, a := range arg2 { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, "ListFeatures", varargs...) - ret0, _ := ret[0].(*core.CoreServiceTypes_ListFeaturesResponse) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListFeatures indicates an expected call of ListFeatures -func (mr *MockCoreServiceClientMockRecorder) ListFeatures(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { - varargs := append([]interface{}{arg0, arg1}, arg2...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListFeatures", reflect.TypeOf((*MockCoreServiceClient)(nil).ListFeatures), varargs...) -} - -// ListStorage mocks base method -func (m *MockCoreServiceClient) ListStorage(arg0 context.Context, arg1 *empty.Empty, arg2 ...grpc.CallOption) (*core.CoreServiceTypes_ListStorageResponse, error) { - varargs := []interface{}{arg0, arg1} - for _, a := range arg2 { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, "ListStorage", varargs...) - ret0, _ := ret[0].(*core.CoreServiceTypes_ListStorageResponse) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListStorage indicates an expected call of ListStorage -func (mr *MockCoreServiceClientMockRecorder) ListStorage(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { - varargs := append([]interface{}{arg0, arg1}, arg2...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListStorage", reflect.TypeOf((*MockCoreServiceClient)(nil).ListStorage), varargs...) -} diff --git a/cli/feast/cmd/root.go b/cli/feast/cmd/root.go deleted file mode 100644 index 18ab4d6a5d4..00000000000 --- a/cli/feast/cmd/root.go +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package cmd - -import ( - "os" - - "github.com/spf13/cobra" -) - -// rootCmd represents the base command when called without any subcommands -var rootCmd = &cobra.Command{ - Use: "feast", - Short: "Utility to manage Feature Store", -} - -// Execute adds all child commands to the root command and sets flags appropriately. -// This is called by main.main(). It only needs to happen once to the rootCmd. 
-func Execute() { - defer closeConn() - if err := rootCmd.Execute(); err != nil { - handleErr(err) - } -} - -func init() { - cobra.OnInitialize(initConfig) -} - -func handleErr(err error) { - if err != nil { - os.Exit(1) - } -} diff --git a/cli/feast/cmd/testdata/invalid_entity.yaml b/cli/feast/cmd/testdata/invalid_entity.yaml deleted file mode 100644 index 4fafacc87b0..00000000000 --- a/cli/feast/cmd/testdata/invalid_entity.yaml +++ /dev/null @@ -1,5 +0,0 @@ -name: myentity -description: test entity with tag -tags: - - tag1 - tag2 \ No newline at end of file diff --git a/cli/feast/cmd/testdata/valid_entity.yaml b/cli/feast/cmd/testdata/valid_entity.yaml deleted file mode 100644 index 97f215711f0..00000000000 --- a/cli/feast/cmd/testdata/valid_entity.yaml +++ /dev/null @@ -1,5 +0,0 @@ -name : myentity -description: test entity with tag -tags: - - tag1 - - tag2 \ No newline at end of file diff --git a/cli/feast/cmd/testdata/valid_entity_no_tag.yaml b/cli/feast/cmd/testdata/valid_entity_no_tag.yaml deleted file mode 100644 index 828a768b291..00000000000 --- a/cli/feast/cmd/testdata/valid_entity_no_tag.yaml +++ /dev/null @@ -1,2 +0,0 @@ -name : myentity -description: test entity without tag \ No newline at end of file diff --git a/cli/feast/cmd/testdata/valid_feature.yaml b/cli/feast/cmd/testdata/valid_feature.yaml deleted file mode 100644 index 824b1a1ce34..00000000000 --- a/cli/feast/cmd/testdata/valid_feature.yaml +++ /dev/null @@ -1,7 +0,0 @@ -id: myentity.feature_bool_redis1 -name: feature_bool_redis1 -entity: myentity -owner: bob@example.com -description: test entity. -valueType: BOOL -uri: https://github.com/bob/example \ No newline at end of file diff --git a/cli/feast/cmd/testdata/valid_feature_group.yaml b/cli/feast/cmd/testdata/valid_feature_group.yaml deleted file mode 100644 index f877d209fec..00000000000 --- a/cli/feast/cmd/testdata/valid_feature_group.yaml +++ /dev/null @@ -1,7 +0,0 @@ -id: my_fg -tags: ["tag1", "tag2"] -dataStores: - serving: - id: "REDIS1" - warehouse: - id: "BIGQUERY1" \ No newline at end of file diff --git a/cli/feast/cmd/version.go b/cli/feast/cmd/version.go deleted file mode 100644 index 43ac93ab903..00000000000 --- a/cli/feast/cmd/version.go +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package cmd - -import ( - "fmt" - - "github.com/spf13/cobra" -) - -// Version is the cli version, injected at compile time -var version string - -var versionCmd = &cobra.Command{ - Use: "version", - Short: "feast cli version", - Run: func(cmd *cobra.Command, args []string) { - fmt.Println(version) - }, -} - -func init() { - rootCmd.AddCommand(versionCmd) -} - -// SetVersion sets the version to the given version. 
-func SetVersion(v string) { - version = v -} diff --git a/cli/feast/main.go b/cli/feast/main.go deleted file mode 100644 index ccaefa42724..00000000000 --- a/cli/feast/main.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package main - -import ( - "github.com/gojek/feast/cli/feast/cmd" -) - -// Version is the Feast version -var Version string - -func main() { - cmd.SetVersion(Version) - cmd.Execute() -} diff --git a/cli/feast/pkg/parse/yaml.go b/cli/feast/pkg/parse/yaml.go deleted file mode 100644 index a86054c930b..00000000000 --- a/cli/feast/pkg/parse/yaml.go +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package parse - -import ( - "encoding/json" - "time" - - "github.com/golang/protobuf/ptypes/timestamp" - - "github.com/ghodss/yaml" - - "github.com/gojek/feast/protos/generated/go/feast/specs" - "github.com/gojek/feast/protos/generated/go/feast/types" -) - -// YamlToFeatureSpec parses the given yaml and outputs the corresponding -// feature spec, if possible. -func YamlToFeatureSpec(in []byte) (*specs.FeatureSpec, error) { - var ymlMap map[string]interface{} - err := yaml.Unmarshal(in, &ymlMap) - if err != nil { - return nil, err - } - ymlMap["valueType"] = valueTypeOf(ymlMap["valueType"].(string)) - if err != nil { - return nil, err - } - yml, err := yaml.Marshal(ymlMap) - if err != nil { - return nil, err - } - j, err := yaml.YAMLToJSON(yml) - if err != nil { - return nil, err - } - var fs specs.FeatureSpec - err = json.Unmarshal(j, &fs) - return &fs, err -} - -// YamlToEntitySpec parses the given yaml and returns the corresponding entity spec, -// if possible. -func YamlToEntitySpec(in []byte) (*specs.EntitySpec, error) { - j, err := yaml.YAMLToJSON(in) - if err != nil { - return nil, err - } - var es specs.EntitySpec - err = json.Unmarshal(j, &es) - return &es, err -} - -// YamlToFeatureGroupSpec parses the given yaml and returns the corresponding feature -// group spec, if possible. -func YamlToFeatureGroupSpec(in []byte) (*specs.FeatureGroupSpec, error) { - j, err := yaml.YAMLToJSON(in) - if err != nil { - return nil, err - } - var fgs specs.FeatureGroupSpec - err = json.Unmarshal(j, &fgs) - return &fgs, err -} - -// YamlToImportSpec parses the given yaml and returns the corresponding import -// spec, if possible. 
-func YamlToImportSpec(in []byte) (*specs.ImportSpec, error) { - var ymlMap map[string]interface{} - err := yaml.Unmarshal(in, &ymlMap) - if err != nil { - return nil, err - } - - // either timestampValue or timestampColumn - var timestampValue *timestamp.Timestamp - var timestampColumn string - if ymlMap["schema"] != nil { - schemaYmlMap := ymlMap["schema"].(map[string]interface{}) - if ts, ok := schemaYmlMap["timestampValue"]; ok { - t, err := time.Parse("2006-01-02T15:04:05.000Z", ts.(string)) - if err != nil { - return nil, err - } - timestampValue = ×tamp.Timestamp{Seconds: t.Unix()} - } - if ts, ok := schemaYmlMap["timestampColumn"]; ok { - timestampColumn = ts.(string) - } - if err != nil { - return nil, err - } - } - - j, err := yaml.YAMLToJSON(in) - if err != nil { - return nil, err - } - var is specs.ImportSpec - err = json.Unmarshal(j, &is) - if timestampValue != nil { - is.Schema.Timestamp = &specs.Schema_TimestampValue{TimestampValue: timestampValue} - } else if timestampColumn != "" { - is.Schema.Timestamp = &specs.Schema_TimestampColumn{TimestampColumn: timestampColumn} - } - return &is, err -} - -func valueTypeOf(str string) types.ValueType_Enum { - return types.ValueType_Enum(types.ValueType_Enum_value[str]) -} diff --git a/cli/feast/pkg/parse/yaml_test.go b/cli/feast/pkg/parse/yaml_test.go deleted file mode 100644 index 8b19879791a..00000000000 --- a/cli/feast/pkg/parse/yaml_test.go +++ /dev/null @@ -1,234 +0,0 @@ -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
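For reference, the `timestampValue` branch of `YamlToImportSpec` parses the schema timestamp with a fixed layout and keeps only whole seconds (fractional seconds are dropped), so `2018-09-25T00:00:00.000Z` becomes `Seconds: 1537833600` — the value the test below also expects. A minimal sketch of that conversion:

```go
package main

import (
	"fmt"
	"log"
	"time"

	"github.com/golang/protobuf/ptypes/timestamp"
)

func main() {
	// Mirrors the conversion inside YamlToImportSpec: parse the fixed layout,
	// keep only the Unix seconds.
	t, err := time.Parse("2006-01-02T15:04:05.000Z", "2018-09-25T00:00:00.000Z")
	if err != nil {
		log.Fatal(err)
	}
	ts := &timestamp.Timestamp{Seconds: t.Unix()}
	fmt.Println(ts.Seconds) // 1537833600
}
```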
- -package parse - -import ( - "testing" - - "github.com/golang/protobuf/ptypes/timestamp" - - "github.com/gojek/feast/protos/generated/go/feast/specs" - "github.com/gojek/feast/protos/generated/go/feast/types" - - "github.com/google/go-cmp/cmp" -) - -func TestYamlToFeatureSpec(t *testing.T) { - tt := []struct { - name string - input []byte - expected *specs.FeatureSpec - err error - }{ - { - name: "valid yaml", - input: []byte(`id: test.test_feature_two -name: test_feature_two -entity: test -owner: bob@example.com -description: testing feature -valueType: INT64 -uri: https://github.com/bob/example`), - expected: &specs.FeatureSpec{ - Id: "test.test_feature_two", - Owner: "bob@example.com", - Name: "test_feature_two", - Description: "testing feature", - Uri: "https://github.com/bob/example", - ValueType: types.ValueType_INT64, - Entity: "test", - }, - err: nil, - }, - } - - for _, tc := range tt { - t.Run(tc.name, func(t *testing.T) { - spec, err := YamlToFeatureSpec(tc.input) - if tc.err == nil { - if err != nil { - t.Error(err) - } else if !cmp.Equal(spec, tc.expected) { - t.Errorf("Expected %s, got %s", tc.expected, spec) - } - } else { - // we expect an error - if err == nil { - t.Error(err) - } else if err.Error() != tc.err.Error() { - t.Errorf("Expected error %v, got %v", err.Error(), tc.err.Error()) - } - } - }) - } -} - -func TestYamlToEntitySpec(t *testing.T) { - tt := []struct { - name string - input []byte - expected *specs.EntitySpec - err error - }{ - { - name: "valid yaml", - input: []byte(`name: test -description: test entity -tags: -- tag1 -- tag2`), - expected: &specs.EntitySpec{ - Name: "test", - Description: "test entity", - Tags: []string{"tag1", "tag2"}, - }, - err: nil, - }, - } - - for _, tc := range tt { - t.Run(tc.name, func(t *testing.T) { - spec, err := YamlToEntitySpec(tc.input) - if tc.err == nil { - if err != nil { - t.Error(err) - } else if !cmp.Equal(spec, tc.expected) { - t.Errorf("Expected %s, got %s", tc.expected, spec) - } - } else { - // we expect an error - if err == nil { - t.Error(err) - } else if err.Error() != tc.err.Error() { - t.Errorf("Expected error %v, got %v", err.Error(), tc.err.Error()) - } - } - }) - } -} - -func TestYamlToFeatureGroupSpec(t *testing.T) { - tt := []struct { - name string - input []byte - expected *specs.FeatureGroupSpec - err error - }{ - { - name: "valid yaml", - input: []byte(`id: test -tags: -- tag1 -- tag2`), - expected: &specs.FeatureGroupSpec{ - Id: "test", - Tags: []string{"tag1", "tag2"}, - }, - err: nil, - }, - } - - for _, tc := range tt { - t.Run(tc.name, func(t *testing.T) { - spec, err := YamlToFeatureGroupSpec(tc.input) - if tc.err == nil { - if err != nil { - t.Error(err) - } else if !cmp.Equal(spec, tc.expected) { - t.Errorf("Expected %s, got %s", tc.expected, spec) - } - } else { - // we expect an error - if err == nil { - t.Error(err) - } else if err.Error() != tc.err.Error() { - t.Errorf("Expected error %v, got %v", err.Error(), tc.err.Error()) - } - } - }) - } -} - -func TestYamlToImportSpec(t *testing.T) { - tt := []struct { - name string - input []byte - expected *specs.ImportSpec - err error - }{ - { - name: "valid yaml", - input: []byte(`type: file -jobOptions: - coalesceRows.enabled: "true" -sourceOptions: - format: csv - path: jaeger_last_opportunity_sample.csv -entities: - - driver -schema: - entityIdColumn: driver_id - timestampValue: 2018-09-25T00:00:00.000Z - fields: - - name: timestamp - - name: driver_id - - name: last_opportunity - featureId: driver.last_opportunity`), - expected: 
&specs.ImportSpec{ - Type: "file", - JobOptions: map[string]string{ - "coalesceRows.enabled": "true", - }, - SourceOptions: map[string]string{ - "format": "csv", - "path": "jaeger_last_opportunity_sample.csv", - }, - Entities: []string{"driver"}, - Schema: &specs.Schema{ - Fields: []*specs.Field{ - {Name: "timestamp"}, - {Name: "driver_id"}, - {Name: "last_opportunity", FeatureId: "driver.last_opportunity"}, - }, - EntityIdColumn: "driver_id", - Timestamp: &specs.Schema_TimestampValue{ - TimestampValue: ×tamp.Timestamp{Seconds: 1537833600}, - }, - }, - }, - err: nil, - }, - } - - for _, tc := range tt { - t.Run(tc.name, func(t *testing.T) { - spec, err := YamlToImportSpec(tc.input) - if tc.err == nil { - if err != nil { - t.Error(err) - } else if !cmp.Equal(spec, tc.expected) { - t.Errorf("Expected %s, got %s", tc.expected, spec) - } - } else { - // we expect an error - if err == nil { - t.Error(err) - } else if err.Error() != tc.err.Error() { - t.Errorf("Expected error %v, got %v", err.Error(), tc.err.Error()) - } - } - }) - } -} diff --git a/cli/feast/pkg/printer/job.go b/cli/feast/pkg/printer/job.go deleted file mode 100644 index 45af68519bf..00000000000 --- a/cli/feast/pkg/printer/job.go +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package printer - -import ( - "fmt" - "strings" - - "github.com/gojek/feast/cli/feast/pkg/timeutil" - "github.com/gojek/feast/protos/generated/go/feast/core" -) - -// PrintJobDetail pretty prints the given job detail. -// Prints and returns the resultant formatted string. 
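Note on the expected value above: the removed `YamlToImportSpec` parses `schema.timestampValue` with the Go layout `2006-01-02T15:04:05.000Z`, and the test fixture expects `2018-09-25T00:00:00.000Z` to become a protobuf `Timestamp` with `Seconds: 1537833600`. That constant can be double-checked with `protobuf-java-util`, which the core module already depends on. A minimal sketch (Java is used for all examples here; the class name is illustrative only):

```java
import com.google.protobuf.Timestamp;
import com.google.protobuf.util.Timestamps;

public class TimestampCheck {
  public static void main(String[] args) throws Exception {
    // RFC 3339 timestamp taken from the test fixture above.
    Timestamp ts = Timestamps.parse("2018-09-25T00:00:00.000Z");
    // Prints 1537833600, the value the expected ImportSpec carries.
    System.out.println(ts.getSeconds());
  }
}
```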
-func PrintJobDetail(jobDetail *core.JobServiceTypes_JobDetail) string { - lines := []string{fmt.Sprintf("%s:\t%s", "Id", jobDetail.GetId()), - fmt.Sprintf("%s:\t%s", "Ext Id", jobDetail.GetExtId()), - fmt.Sprintf("%s:\t%s", "Type", jobDetail.GetType()), - fmt.Sprintf("%s:\t%s", "Runner", jobDetail.GetRunner()), - fmt.Sprintf("%s:\t%s", "Status", jobDetail.GetStatus()), - fmt.Sprintf("%s:\t%s", "Age", timeutil.DurationUntilNowInHumanFormat(*jobDetail.GetCreated())), - } - lines = append(lines, "Metrics:") - for k, v := range jobDetail.GetMetrics() { - split := strings.Split(k, ":") - if split[0] == "row" { - lines = append(lines, fmt.Sprintf(" %s: %.1f", strings.Join(split[1:], ""), v)) - } - } - lines = append(lines, "Entities:") - for _, entity := range jobDetail.GetEntities() { - lines = append(lines, printEntity(entity, jobDetail.GetMetrics())) - } - lines = append(lines, "Features:") - for _, feature := range jobDetail.GetFeatures() { - lines = append(lines, printFeature(feature, jobDetail.GetMetrics())) - } - out := strings.Join(lines, "\n") - fmt.Println(out) - return out -} - -func printEntity(entityName string, metrics map[string]float64) string { - lines := []string{ - fmt.Sprintf("- Name: %s", entityName), - " Metrics: ", - } - for k, v := range metrics { - split := strings.Split(k, ":") - if split[0] == "entity" { - if split[1] == entityName { - lines = append(lines, fmt.Sprintf(" %s: %.1f", strings.Join(split[2:], ""), v)) - } - } - } - return strings.Join(lines, "\n") -} - -func printFeature(featureName string, metrics map[string]float64) string { - lines := []string{ - fmt.Sprintf("- Id: %s", featureName), - " Metrics: ", - } - for k, v := range metrics { - split := strings.Split(k, ":") - if split[0] == "feature" { - if split[1] == featureName { - lines = append(lines, fmt.Sprintf(" %s: %.1f", strings.Join(split[2:], ""), v)) - } - } - } - return strings.Join(lines, "\n") -} diff --git a/cli/feast/pkg/printer/job_test.go b/cli/feast/pkg/printer/job_test.go deleted file mode 100644 index 4b930a56d7b..00000000000 --- a/cli/feast/pkg/printer/job_test.go +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package printer - -import ( - "testing" -) - -func Test_printEntity(t *testing.T) { - type args struct { - entityName string - metrics map[string]float64 - } - tests := []struct { - name string - args args - want string - }{ - {"1 feature 1 metric value", args{"entity1", map[string]float64{"entity:entity1:value1": 89.0}}, "- Name: entity1\n Metrics: \n value1: 89.0"}, - {"1 feature 2 metric value", args{"entity1", map[string]float64{"entity:entity1:value1": 89.0, "entity:entity1:value2": 90.0}}, "- Name: entity1\n Metrics: \n value1: 89.0\n value2: 90.0"}, - {"1 feature 0 metric value", args{"entity1", map[string]float64{"entity:entity2:value1": 89.0, "entity:entity3:value2": 90.0}}, "- Name: entity1\n Metrics: "}, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if got := printEntity(tt.args.entityName, tt.args.metrics); got != tt.want { - t.Errorf("printEntity() = %v, want %v", got, tt.want) - } - }) - } -} - -func Test_printFeature(t *testing.T) { - type args struct { - featureName string - metrics map[string]float64 - } - tests := []struct { - name string - args args - want string - }{ - {"1 feature 1 metric value", args{"feature1", map[string]float64{"feature:feature1:value1": 89.0, "feature:feature2:value1": 90.0}}, "- Id: feature1\n Metrics: \n value1: 89.0"}, - {"1 feature 2 metric value", args{"feature1", map[string]float64{"feature:feature1:value1": 89.0, "feature:feature1:value2": 90.0}}, "- Id: feature1\n Metrics: \n value1: 89.0\n value2: 90.0"}, - {"1 feature 0 metric value", args{"feature1", map[string]float64{"feature:feature2:value1": 89.0, "feature:feature3:value1": 90.0}}, "- Id: feature1\n Metrics: "}, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if got := printFeature(tt.args.featureName, tt.args.metrics); got != tt.want { - t.Errorf("printEntity() = %v, want %v", got, tt.want) - println(len(got), len(tt.want)) - } - }) - } -} diff --git a/cli/feast/pkg/printer/specs.go b/cli/feast/pkg/printer/specs.go deleted file mode 100644 index 7cce0648eea..00000000000 --- a/cli/feast/pkg/printer/specs.go +++ /dev/null @@ -1,77 +0,0 @@ -package printer - -import ( - "fmt" - "strings" - - "github.com/gojek/feast/cli/feast/pkg/timeutil" - "github.com/gojek/feast/protos/generated/go/feast/core" -) - -// PrintFeatureDetail prints the details about the feature. -// Prints and returns the resultant formatted string. 
-func PrintFeatureDetail(featureDetail *core.UIServiceTypes_FeatureDetail) string { - spec := featureDetail.GetSpec() - lines := []string{ - fmt.Sprintf("%s:\t%s", "Id", spec.GetId()), - fmt.Sprintf("%s:\t%s", "Entity", spec.GetEntity()), - fmt.Sprintf("%s:\t%s", "Owner", spec.GetOwner()), - fmt.Sprintf("%s:\t%s", "Description", spec.GetDescription()), - fmt.Sprintf("%s:\t%s", "ValueType", spec.GetValueType()), - fmt.Sprintf("%s:\t%s", "Uri", spec.GetUri()), - } - lines = append(lines, fmt.Sprintf("%s:\t%s", "Created", timeutil.FormatToRFC3339(*featureDetail.GetCreated()))) - lines = append(lines, fmt.Sprintf("%s:\t%s", "LastUpdated", timeutil.FormatToRFC3339(*featureDetail.GetLastUpdated()))) - if jobs := featureDetail.GetJobs(); len(jobs) > 0 { - lines = append(lines, "Related Jobs:") - for _, job := range jobs { - lines = append(lines, fmt.Sprintf("- %s", job)) - } - } - if tags := spec.GetTags(); len(tags) > 0 { - lines = append(lines, fmt.Sprintf("Tags: %s", strings.Join(tags, ","))) - } - out := strings.Join(lines, "\n") - fmt.Println(out) - return out -} - -// PrintEntityDetail prints the details about the feature. -// Prints and returns the resultant formatted string. -func PrintEntityDetail(entityDetail *core.UIServiceTypes_EntityDetail) string { - spec := entityDetail.GetSpec() - lines := []string{ - fmt.Sprintf("%s:\t%s", "Name", spec.GetName()), - fmt.Sprintf("%s:\t%s", "Description", spec.GetDescription()), - } - if tags := spec.GetTags(); len(tags) > 0 { - lines = append(lines, fmt.Sprintf("Tags: %s", strings.Join(tags, ","))) - } - lines = append(lines, fmt.Sprintf("%s:\t%s", "LastUpdated", timeutil.FormatToRFC3339(*entityDetail.GetLastUpdated()))) - lines = append(lines, "Related Jobs:") - for _, job := range entityDetail.GetJobs() { - lines = append(lines, fmt.Sprintf("- %s", job)) - } - out := strings.Join(lines, "\n") - fmt.Println(out) - return out -} - -// PrintStorageDetail prints the details about the feature. -// Prints and returns the resultant formatted string. -// This function is deprecated, and may be removed in subsequent versions. 
-func PrintStorageDetail(storageDetail *core.UIServiceTypes_StorageDetail) string { - spec := storageDetail.GetSpec() - lines := []string{ - fmt.Sprintf("%s:\t%s", "Id", spec.GetId()), - fmt.Sprintf("%s:\t%s", "Type", spec.GetType()), - fmt.Sprintf("Options:"), - } - for k, v := range spec.GetOptions() { - lines = append(lines, fmt.Sprintf(" %s: %s", k, v)) - } - lines = append(lines, fmt.Sprintf("%s:\t%s", "LastUpdated", timeutil.FormatToRFC3339(*storageDetail.GetLastUpdated()))) - out := strings.Join(lines, "\n") - fmt.Println(out) - return out -} diff --git a/cli/feast/pkg/printer/specs_test.go b/cli/feast/pkg/printer/specs_test.go deleted file mode 100644 index d605bb7f95a..00000000000 --- a/cli/feast/pkg/printer/specs_test.go +++ /dev/null @@ -1,142 +0,0 @@ -package printer - -import ( - "fmt" - "strings" - "testing" - - "github.com/gojek/feast/cli/feast/pkg/timeutil" - - "github.com/golang/protobuf/ptypes/timestamp" - - "github.com/gojek/feast/protos/generated/go/feast/core" - "github.com/gojek/feast/protos/generated/go/feast/specs" - "github.com/gojek/feast/protos/generated/go/feast/types" -) - -func TestPrintFeature(t *testing.T) { - tt := []struct { - name string - input *core.UIServiceTypes_FeatureDetail - expected string - }{ - { - name: "feature", - input: &core.UIServiceTypes_FeatureDetail{ - Spec: &specs.FeatureSpec{ - Id: "test.test_feature_two", - Owner: "bob@example.com", - Name: "test_feature_two", - Description: "testing feature", - Uri: "https://github.com/bob/example", - ValueType: types.ValueType_INT64, - Entity: "test", - }, - BigqueryView: "bqurl", - Jobs: []string{"job1", "job2"}, - LastUpdated: ×tamp.Timestamp{Seconds: 1}, - Created: ×tamp.Timestamp{Seconds: 1}, - }, - expected: fmt.Sprintf(`Id: test.test_feature_two -Entity: test -Owner: bob@example.com -Description: testing feature -ValueType: INT64 -Uri: https://github.com/bob/example -Created: %s -LastUpdated: %s -Related Jobs: -- job1 -- job2`, - timeutil.FormatToRFC3339(timestamp.Timestamp{Seconds: 1}), - timeutil.FormatToRFC3339(timestamp.Timestamp{Seconds: 1})), - }, - } - - for _, tc := range tt { - t.Run(tc.name, func(t *testing.T) { - out := PrintFeatureDetail(tc.input) - if out != tc.expected { - t.Errorf("Expected output:\n%s \nActual:\n%s \n", tc.expected, out) - } - }) - } -} - -func TestPrintEntity(t *testing.T) { - entityDetail := &core.UIServiceTypes_EntityDetail{ - Spec: &specs.EntitySpec{ - Name: "test", - Description: "my test entity", - Tags: []string{"tag1", "tag2"}, - }, - Jobs: []string{"job1", "job2"}, - LastUpdated: ×tamp.Timestamp{Seconds: 1}, - } - out := PrintEntityDetail(entityDetail) - expected := fmt.Sprintf(`Name: test -Description: my test entity -Tags: tag1,tag2 -LastUpdated: %s -Related Jobs: -- job1 -- job2`, - timeutil.FormatToRFC3339(timestamp.Timestamp{Seconds: 1})) - if out != expected { - t.Errorf("Expected output:\n%s \nActual:\n%s \n", expected, out) - } -} - -func TestPrintStorage(t *testing.T) { - storageDetail := &core.UIServiceTypes_StorageDetail{ - Spec: &specs.StorageSpec{ - Id: "REDIS1", - Type: "redis", - }, - LastUpdated: ×tamp.Timestamp{Seconds: 1}, - } - out := PrintStorageDetail(storageDetail) - expected := fmt.Sprintf(`Id: REDIS1 -Type: redis -Options: -LastUpdated: %s`, - timeutil.FormatToRFC3339(timestamp.Timestamp{Seconds: 1})) - if out != expected { - t.Errorf("Expected output:\n%s \nActual:\n%s \n", expected, out) - } -} - -func TestPrintStorageWithOptions(t *testing.T) { - storageDetail := &core.UIServiceTypes_StorageDetail{ - Spec: 
&specs.StorageSpec{ - Id: "REDIS1", - Type: "redis", - Options: map[string]string{ - "option1": "value1", - "option2": "value2", - }, - }, - LastUpdated: ×tamp.Timestamp{Seconds: 1}, - } - out := PrintStorageDetail(storageDetail) - - // Since map iteration order in Golang is radomized and "Options" is stored in a map, - // this test just check that the output "contains" the key and value - // rather than checking the exact match - - expected := []string{ - "Id: REDIS1", - "Type: redis", - "Options:", - " option1: value1", - " option2: value2", - fmt.Sprintf("LastUpdated: %s", timeutil.FormatToRFC3339(timestamp.Timestamp{Seconds: 1})), - } - - for _, want := range expected { - if !strings.Contains(out, want) { - t.Errorf("Missing \"%s\" from actual output\nActual output:\n%s", want, out) - } - } - -} diff --git a/cli/feast/pkg/timeutil/timeutil.go b/cli/feast/pkg/timeutil/timeutil.go deleted file mode 100644 index 379a3367e3c..00000000000 --- a/cli/feast/pkg/timeutil/timeutil.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package timeutil - -import ( - "fmt" - "math" - "time" - - "github.com/golang/protobuf/ptypes/timestamp" -) - -func DurationUntilNowInHumanFormat(timestamp timestamp.Timestamp) string { - timeSinceCreation := float64(time.Now().Unix() - timestamp.GetSeconds()) - if days := math.Floor(timeSinceCreation / float64(8.64e+4)); days > 0 { - return fmt.Sprintf("%dd", int(days)) - } else if hours := math.Floor(timeSinceCreation / float64(3.6e+3)); hours > 0 { - return fmt.Sprintf("%dh", int(hours)) - } - return fmt.Sprintf("%dm", int(math.Floor(timeSinceCreation/float64(60)))) -} - -func FormatToRFC3339(ts timestamp.Timestamp) string { - t := time.Unix(ts.GetSeconds(), int64(ts.GetNanos())) - return t.Format(time.RFC3339) -} diff --git a/cli/feast/pkg/timeutil/timeutil_test.go b/cli/feast/pkg/timeutil/timeutil_test.go deleted file mode 100644 index 907f60f5e61..00000000000 --- a/cli/feast/pkg/timeutil/timeutil_test.go +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
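The removed `DurationUntilNowInHumanFormat` helper buckets the time elapsed since a protobuf `Timestamp` into days, hours, or minutes, which is exactly what the test cases below exercise. For reference only, a minimal Java equivalent is sketched here; it is not part of this change and the class name is hypothetical:

```java
import com.google.protobuf.Timestamp;
import java.time.Instant;

public class AgeFormat {
  // Mirrors the removed Go helper: >= 1 day -> "Nd", >= 1 hour -> "Nh", otherwise "Nm".
  public static String durationUntilNow(Timestamp created) {
    long elapsed = Instant.now().getEpochSecond() - created.getSeconds();
    if (elapsed >= 86_400) {
      return (elapsed / 86_400) + "d";
    } else if (elapsed >= 3_600) {
      return (elapsed / 3_600) + "h";
    }
    return (elapsed / 60) + "m";
  }
}
```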
- -package timeutil - -import ( - "testing" - "time" - - "github.com/golang/protobuf/ptypes/timestamp" -) - -func TestDurationUntilNowInHumanFormat(t *testing.T) { - type args struct { - createdTimestamp timestamp.Timestamp - } - - tests := []struct { - name string - args args - want string - }{ - {"59 second before now", args{timestamp.Timestamp{Seconds: time.Now().Add(-1 * time.Duration(time.Second)).Unix()}}, "0m"}, - {"1 minute before now", args{timestamp.Timestamp{Seconds: time.Now().Add(-1 * time.Duration(time.Minute)).Unix()}}, "1m"}, - {"30 minutes before now", args{timestamp.Timestamp{Seconds: time.Now().Add(-30 * time.Duration(time.Minute)).Unix()}}, "30m"}, - {"1 hour before now", args{timestamp.Timestamp{Seconds: time.Now().Add(-1 * time.Duration(time.Hour)).Unix()}}, "1h"}, - {"2 hours before now", args{timestamp.Timestamp{Seconds: time.Now().Add(-2 * time.Duration(time.Hour)).Unix()}}, "2h"}, - {"1 day before now", args{timestamp.Timestamp{Seconds: time.Now().Add(-24 * time.Duration(time.Hour)).Unix()}}, "1d"}, - {"2 days before now", args{timestamp.Timestamp{Seconds: time.Now().Add(-48 * time.Duration(time.Hour)).Unix()}}, "2d"}, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if got := DurationUntilNowInHumanFormat(tt.args.createdTimestamp); got != tt.want { - t.Errorf("DurationUntilNowInHumanFormat() = %v, want %v", got, tt.want) - } - }) - } -} diff --git a/core/.gitignore b/core/.gitignore index fc6c70b453b..fc240cb6e9a 100644 --- a/core/.gitignore +++ b/core/.gitignore @@ -1,5 +1,5 @@ ### Scratch files ### -scratch* +scratch_redis* ### Local Environment ### *local*.env diff --git a/core/README.md b/core/README.md new file mode 100644 index 00000000000..8f5b6f03bf5 --- /dev/null +++ b/core/README.md @@ -0,0 +1,32 @@ +### Getting Started Guide for Feast Core Developers + +Pre-requisites: +- [Maven](https://maven.apache.org/install.html) build tool version 3.6.x +- A running Postgres instance. To get started more easily, configure the database like so: + ``` + database: postgres + user: postgres + password: password + ``` +- A running Redis instance + ``` + host: localhost + port: 6379 + ``` +- Access to Google Cloud BigQuery (optional) +- Access to Kafka brokers (to test starting ingestion jobs from Feast Core) + +Run the following Maven command to start the Feast Core gRPC service locally on port 6565: +```bash +# Using configuration from src/main/resources/application.yml +mvn spring-boot:run +# Using configuration from custom location e.g.
/tmp/config.application.yml +mvn spring-boot:run -Dspring.config.location=/tmp/config.application.yml +``` + +If you have [grpc_cli](https://github.com/grpc/grpc/blob/master/doc/command_line_tool.md) installed, you can check that Feast Core is running +``` +grpc_cli ls localhost:6565 +grpc_cli call localhost:6565 GetFeastCoreVersion "" +grpc_cli call localhost:6565 ListStores "" +``` \ No newline at end of file diff --git a/core/pom.xml b/core/pom.xml index bd6e2f9c4d4..e9dfee33114 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -19,179 +19,105 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> 4.0.0 + feast feast-parent ${revision} - feast-core - jar + Feast Core + Feature registry and ingestion coordinator + feast-core - org.apache.maven.plugins - maven-shade-plugin - 2.4.3 + org.springframework.boot + spring-boot-maven-plugin - - - META-INF/spring.handlers - - - META-INF/spring.factories - - - META-INF/spring.schemas - - - META-INF/spring.components - - - feast.core.CoreApplication - - - - + false - - - package - - shade - - - - - - org.springframework.boot - spring-boot-maven-plugin - ${springBootVersion} - - org.xolstice.maven.plugins protobuf-maven-plugin - - com.github.eirslett - frontend-maven-plugin - 1.6 - - - install node and yarn - - install-node-and-yarn - - generate-resources - - - yarn install - - yarn - - - install - - - - build - - yarn - - - run build - - - - - v10.15.3 - v1.16.0 - ../ui - target - - - - - jcenter - https://jcenter.bintray.com/ - - - + + feast + feast-ingestion + ${project.version} + + + + + org.springframework.boot + spring-boot-devtools + true + + + + javax.inject + javax.inject + 1 + org.springframework.boot spring-boot-starter-web - ${springBootVersion} - - - org.springframework.boot - spring-boot-starter-logging - - org.springframework.boot spring-boot-starter-log4j2 - ${springBootVersion} - + - org.lognet + io.github.lognet grpc-spring-boot-starter - 2.4.1 org.springframework.boot spring-boot-starter-data-jpa - ${springBootVersion} org.springframework.boot spring-boot-starter-actuator - ${springBootVersion} - - + - io.grpc - grpc-netty - ${grpcVersion} + org.springframework.boot + spring-boot-configuration-processor + io.grpc grpc-services - ${grpcVersion} io.grpc grpc-stub - ${grpcVersion} com.google.protobuf protobuf-java-util - ${protobufVersion} com.google.guava guava - 26.0-jre @@ -199,18 +125,6 @@ gson 2.8.5 - - - commons-codec - commons-codec - 1.10 - - - - joda-time - joda-time - 2.9.9 - com.google.api-client google-api-client @@ -222,141 +136,37 @@ v1b3-rev266-1.25.0 - - - org.jdbi - jdbi3 - 3.0.0-beta2 - - - - org.jdbi - jdbi3-sqlobject - 3.4.0 - - - - org.postgresql - postgresql - 42.2.5 - org.hibernate hibernate-core 5.3.6.Final - - - com.google.cloud.bigtable - bigtable-hbase-2.x-shaded - 1.5.0 - - - - com.google.cloud - google-cloud-bigquery - 1.48.0 - - - com.google.cloud - google-cloud-nio - 0.83.0-alpha - - - org.apache.flink - flink-clients_2.11 - 1.5.5 - - - - com.fasterxml.jackson.core - jackson-databind - 2.9.9 - - - com.fasterxml.jackson.core - jackson-core - 2.9.9 - - - com.fasterxml.jackson.core - jackson-annotations - 2.9.9 - + - com.fasterxml.jackson.dataformat - jackson-dataformat-yaml - 2.9.9 + org.postgresql + postgresql + provided + true - - - com.github.spullara.mustache.java - compiler - 0.9.5 - - - com.hubspot.jinjava - jinjava - 2.4.12 - - - - com.fasterxml.jackson.core - jackson-databind - - - 
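As an alternative to the `grpc_cli` check described in the new `core/README.md` above, a plain Java client can hit the same endpoint. This is a hypothetical sketch that assumes the generated `CoreServiceGrpc` blocking stub from the Feast protos and a reasonably recent grpc-java; the host and port match the README's defaults:

```java
import feast.core.CoreServiceGrpc;
import feast.core.CoreServiceProto.GetFeastCoreVersionRequest;
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;

public class CoreSmokeTest {
  public static void main(String[] args) {
    // Plaintext channel to the locally running Feast Core gRPC service.
    ManagedChannel channel =
        ManagedChannelBuilder.forAddress("localhost", 6565).usePlaintext().build();
    CoreServiceGrpc.CoreServiceBlockingStub stub = CoreServiceGrpc.newBlockingStub(channel);
    // Same call as `grpc_cli call localhost:6565 GetFeastCoreVersion ""`.
    System.out.println(stub.getFeastCoreVersion(GetFeastCoreVersionRequest.getDefaultInstance()));
    channel.shutdown();
  }
}
```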
com.fasterxml.jackson.core - jackson-core - - - com.fasterxml.jackson.core - jackson-annotations - - - - - - io.micrometer - micrometer-core - 1.0.7 - - - io.micrometer - micrometer-registry-statsd - 1.0.7 - - - com.datadoghq - java-dogstatsd-client - 2.6.1 + org.apache.kafka + kafka-clients org.projectlombok lombok - 1.18.2 - provided - - - io.grpc - grpc-testing - ${grpcVersion} - test - - org.hamcrest - hamcrest-all - 1.3 - test + hamcrest-library + com.jayway.jsonpath @@ -370,37 +180,16 @@ 2.23.0 test - - com.squareup.okhttp3 - mockwebserver - 3.11.0 - test - - - org.springframework - spring-test - 5.0.8.RELEASE - test - org.springframework.boot spring-boot-test - ${springBootVersion} test org.springframework.boot spring-boot-test-autoconfigure - ${springBootVersion} test - - com.h2database - h2 - 1.4.198 - test - - diff --git a/core/src/main/java/feast/core/CoreApplication.java b/core/src/main/java/feast/core/CoreApplication.java index 6248243f6b0..993d2c49ef7 100644 --- a/core/src/main/java/feast/core/CoreApplication.java +++ b/core/src/main/java/feast/core/CoreApplication.java @@ -17,15 +17,18 @@ package feast.core; +import feast.core.config.FeastProperties; import lombok.extern.slf4j.Slf4j; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; -import org.springframework.scheduling.annotation.EnableScheduling; +import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.data.jpa.repository.config.EnableJpaRepositories; +import org.springframework.scheduling.annotation.EnableScheduling; @EnableScheduling @SpringBootApplication @EnableJpaRepositories(basePackages = "feast.core.dao") +@EnableConfigurationProperties(FeastProperties.class) @Slf4j public class CoreApplication { public static void main(String[] args) { diff --git a/core/src/main/java/feast/core/OnContextRefresh.java b/core/src/main/java/feast/core/OnContextRefresh.java deleted file mode 100644 index 39d7635f0c5..00000000000 --- a/core/src/main/java/feast/core/OnContextRefresh.java +++ /dev/null @@ -1,29 +0,0 @@ -package feast.core; - -import feast.core.config.StorageConfig.StorageSpecs; -import feast.core.storage.SchemaManager; -import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.event.ContextRefreshedEvent; -import org.springframework.context.event.EventListener; -import org.springframework.stereotype.Component; - -@Slf4j -@Component -public class OnContextRefresh { - - @Autowired - private SchemaManager schemaManager; - @Autowired - private StorageSpecs storageSpecs; - - @EventListener - public void onApplicationEvent(ContextRefreshedEvent event) { - if (storageSpecs.getServingStorageSpec() != null) { - schemaManager.registerStorage(storageSpecs.getServingStorageSpec()); - } - if (storageSpecs.getWarehouseStorageSpec() != null) { - schemaManager.registerStorage(storageSpecs.getWarehouseStorageSpec()); - } - } -} \ No newline at end of file diff --git a/core/src/main/java/feast/core/config/AppConfig.java b/core/src/main/java/feast/core/config/AppConfig.java deleted file mode 100644 index f4533d64828..00000000000 --- a/core/src/main/java/feast/core/config/AppConfig.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.config; - -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -/* - * Application auto configuration - */ -@Configuration -public class AppConfig { - @Bean - public ImportJobDefaults getImportJobDefaults( - @Value("${feast.jobs.runner}") String runner, - @Value("${feast.jobs.options}") String options, - @Value("${feast.jobs.executable}") String executable, - @Value("${feast.jobs.workspace}") String workspace) { - return ImportJobDefaults.builder() - .importJobOptions(options) - .runner(runner) - .executable(executable) - .workspace(workspace) - .build(); - } -} diff --git a/core/src/main/java/feast/core/config/CoreGRpcServerBuilderConfig.java b/core/src/main/java/feast/core/config/CoreGRpcServerBuilderConfig.java new file mode 100644 index 00000000000..3d2d158c0d3 --- /dev/null +++ b/core/src/main/java/feast/core/config/CoreGRpcServerBuilderConfig.java @@ -0,0 +1,14 @@ +package feast.core.config; + +import io.grpc.ServerBuilder; +import io.grpc.protobuf.services.ProtoReflectionService; +import org.lognet.springboot.grpc.GRpcServerBuilderConfigurer; +import org.springframework.stereotype.Component; + +@Component +public class CoreGRpcServerBuilderConfig extends GRpcServerBuilderConfigurer { + @Override + public void configure(ServerBuilder serverBuilder){ + serverBuilder.addService(ProtoReflectionService.newInstance()); + } +} diff --git a/core/src/main/java/feast/core/config/FeastProperties.java b/core/src/main/java/feast/core/config/FeastProperties.java new file mode 100644 index 00000000000..d285abff348 --- /dev/null +++ b/core/src/main/java/feast/core/config/FeastProperties.java @@ -0,0 +1,46 @@ +package feast.core.config; + +import java.util.Map; +import lombok.Getter; +import lombok.Setter; +import org.springframework.boot.context.properties.ConfigurationProperties; + +@Getter +@Setter +@ConfigurationProperties(prefix = "feast", ignoreInvalidFields = true) +public class FeastProperties { + + private String version; + private JobProperties jobs; + private StreamProperties stream; + + @Getter + @Setter + public static class JobProperties { + + private String runner; + private Map options; + private MetricsProperties metrics; + } + + @Getter + @Setter + public static class StreamProperties { + + private String type; + private Map options; + } + + @Getter + @Setter + public static class MetricsProperties { + + private boolean enabled; + private String type; + private String host; + private int port; + } +} + + + diff --git a/core/src/main/java/feast/core/config/FeatureStreamConfig.java b/core/src/main/java/feast/core/config/FeatureStreamConfig.java new file mode 100644 index 00000000000..40344681722 --- /dev/null +++ b/core/src/main/java/feast/core/config/FeatureStreamConfig.java @@ -0,0 +1,65 @@ +package feast.core.config; + +import com.google.common.base.Strings; +import feast.core.SourceProto.KafkaSourceConfig; +import feast.core.SourceProto.SourceType; +import feast.core.config.FeastProperties.StreamProperties; 
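The new `FeastProperties` class above is enabled through `@EnableConfigurationProperties` on `CoreApplication` and binds the `feast.*` keys of `application.yml`. A hypothetical sketch of that binding using Spring Boot 2's `Binder` (all property values are placeholders, and the nested `options` fields are assumed to be `Map<String, String>` since generic parameters were lost in this rendering):

```java
import feast.core.config.FeastProperties;
import java.util.HashMap;
import java.util.Map;
import org.springframework.boot.context.properties.bind.Binder;
import org.springframework.boot.context.properties.source.MapConfigurationPropertySource;

public class FeastPropertiesSketch {
  public static void main(String[] args) {
    // Flat properties as they would appear after loading application.yml.
    Map<String, String> props = new HashMap<>();
    props.put("feast.version", "0.3.0");                                  // placeholder
    props.put("feast.jobs.runner", "DirectRunner");                       // placeholder
    props.put("feast.stream.type", "kafka");
    props.put("feast.stream.options.bootstrapServers", "localhost:9092"); // placeholder
    props.put("feast.stream.options.topic", "feast-features");            // placeholder

    FeastProperties feast = new Binder(new MapConfigurationPropertySource(props))
        .bind("feast", FeastProperties.class)
        .get();
    System.out.println(feast.getStream().getType() + " " + feast.getStream().getOptions());
  }
}
```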
+import feast.core.model.Source; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import lombok.extern.slf4j.Slf4j; +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.admin.AdminClientConfig; +import org.apache.kafka.clients.admin.CreateTopicsResult; +import org.apache.kafka.clients.admin.NewTopic; +import org.apache.kafka.common.errors.TopicExistsException; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Slf4j +@Configuration +public class FeatureStreamConfig { + + @Autowired + @Bean + public Source getDefaultSource(FeastProperties feastProperties) { + StreamProperties streamProperties = feastProperties.getStream(); + SourceType featureStreamType = SourceType + .valueOf(streamProperties.getType().toUpperCase()); + switch (featureStreamType) { + case KAFKA: + String bootstrapServers = streamProperties.getOptions().get("bootstrapServers"); + String topicName = streamProperties.getOptions().get("topic"); + Map map = new HashMap<>(); + map.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); + map.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, "1000"); + AdminClient client = AdminClient.create(map); + + NewTopic newTopic = new NewTopic(topicName, + Integer.valueOf(streamProperties.getOptions().getOrDefault("numPartitions", "1")), + Short.valueOf(streamProperties.getOptions().getOrDefault("replicationFactor", "1"))); + CreateTopicsResult createTopicsResult = client + .createTopics(Collections.singleton(newTopic)); + try { + createTopicsResult.values().get(topicName).get(); + } catch (InterruptedException | ExecutionException e) { + if (e.getCause().getClass().equals(TopicExistsException.class)) { + log.warn(Strings + .lenientFormat( + "Unable to create topic %s in the feature stream, topic already exists, using existing topic.", + topicName)); + } else { + throw new RuntimeException(e.getMessage(), e); + } + } + KafkaSourceConfig sourceConfig = KafkaSourceConfig.newBuilder() + .setBootstrapServers(bootstrapServers).setTopic(topicName).build(); + return new Source(featureStreamType, sourceConfig, true); + default: + throw new RuntimeException("Unsupported source stream, only [KAFKA] is supported"); + } + } +} diff --git a/core/src/main/java/feast/core/config/ImportJobDefaults.java b/core/src/main/java/feast/core/config/ImportJobDefaults.java deleted file mode 100644 index 63dbe9e5896..00000000000 --- a/core/src/main/java/feast/core/config/ImportJobDefaults.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
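`getDefaultSource` above creates the default feature-stream topic at startup and tolerates `TopicExistsException`. A small, hypothetical verification sketch using the same Kafka `AdminClient` API to confirm the topic is present after Core has started (broker address and topic name are placeholders):

```java
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;

public class TopicCheck {
  public static void main(String[] args) throws Exception {
    Map<String, Object> conf = new HashMap<>();
    conf.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder broker
    try (AdminClient client = AdminClient.create(conf)) {
      // listTopics() returns the topics visible to this client; the default
      // feature-stream topic should be among them once it has been created.
      Set<String> topics = client.listTopics().names().get();
      System.out.println(topics.contains("feast-features")); // placeholder topic name
    }
  }
}
```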
- * - */ - -package feast.core.config; - -import lombok.Builder; -import lombok.Getter; -import lombok.Setter; - -/** - * Default options for import job execution - */ -@Getter -@Setter -@Builder -public class ImportJobDefaults { - - private String importJobOptions; - private String runner; - private String workspace; - private String executable; -} - diff --git a/core/src/main/java/feast/core/config/InstrumentationConfig.java b/core/src/main/java/feast/core/config/InstrumentationConfig.java deleted file mode 100644 index eed24873556..00000000000 --- a/core/src/main/java/feast/core/config/InstrumentationConfig.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.config; - -import com.timgroup.statsd.NonBlockingStatsDClient; -import com.timgroup.statsd.StatsDClient; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -@Configuration -public class InstrumentationConfig { - @Bean - public StatsDClient getStatsDClient(@Value("${statsd.host}") String host, - @Value("${statsd.port}") int port) { - return new NonBlockingStatsDClient("feast_core", host, port); - } -} diff --git a/core/src/main/java/feast/core/config/JobConfig.java b/core/src/main/java/feast/core/config/JobConfig.java index 04b7bd8686b..e44036700ba 100644 --- a/core/src/main/java/feast/core/config/JobConfig.java +++ b/core/src/main/java/feast/core/config/JobConfig.java @@ -23,79 +23,54 @@ import com.google.api.services.dataflow.Dataflow; import com.google.api.services.dataflow.DataflowScopes; import com.google.common.base.Strings; -import com.timgroup.statsd.StatsDClient; +import feast.core.config.FeastProperties.JobProperties; import feast.core.job.JobManager; import feast.core.job.JobMonitor; import feast.core.job.NoopJobMonitor; import feast.core.job.Runner; -import feast.core.job.StatsdMetricPusher; -import feast.core.job.dataflow.DataflowJobConfig; import feast.core.job.dataflow.DataflowJobManager; import feast.core.job.dataflow.DataflowJobMonitor; +import feast.core.job.direct.DirectJobRegistry; import feast.core.job.direct.DirectRunnerJobManager; -import feast.core.job.flink.FlinkJobConfig; -import feast.core.job.flink.FlinkJobManager; -import feast.core.job.flink.FlinkJobMonitor; -import feast.core.job.flink.FlinkRestApi; +import feast.core.job.direct.DirectRunnerJobMonitor; import java.io.IOException; import java.security.GeneralSecurityException; -import java.util.List; +import java.util.HashMap; +import java.util.Map; import lombok.extern.slf4j.Slf4j; -import org.apache.flink.client.cli.CliFrontend; -import org.apache.flink.client.cli.CustomCommandLine; -import org.apache.flink.configuration.GlobalConfiguration; -import org.springframework.beans.factory.annotation.Value; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; 
import org.springframework.context.annotation.Configuration; -import org.springframework.web.client.RestTemplate; -/** Beans for job management */ +/** + * Beans for job management + */ @Slf4j @Configuration public class JobConfig { - /** - * Get configuration for dataflow connection - * - * @param projectId - * @param location - * @return DataflowJobConfig - */ - @Bean - public DataflowJobConfig getDataflowJobConfig( - @Value("${feast.jobs.dataflow.projectId}") String projectId, - @Value("${feast.jobs.dataflow.location}") String location) { - return new DataflowJobConfig(projectId, location); - } - - @Bean - public FlinkJobConfig getFlinkJobConfig( - @Value("${feast.jobs.flink.configDir}") String flinkConfigDir, - @Value("${feast.jobs.flink.masterUrl}") String flinkMasterUrl) { - return new FlinkJobConfig(flinkMasterUrl, flinkConfigDir); - } - /** * Get a JobManager according to the runner type and dataflow configuration. * - * @param runnerType runner type: one of [DataflowRunner, DirectRunner, FlinkRunner] - * @param dfConfig dataflow job configuration - * @return JobManager + * @param feastProperties feast config properties */ @Bean + @Autowired public JobManager getJobManager( - @Value("${feast.jobs.runner}") String runnerType, - DataflowJobConfig dfConfig, - FlinkJobConfig flinkConfig, - ImportJobDefaults defaults) + FeastProperties feastProperties, + DirectJobRegistry directJobRegistry) throws Exception { - Runner runner = Runner.fromString(runnerType); - + JobProperties jobProperties = feastProperties.getJobs(); + Runner runner = Runner.fromString(jobProperties.getRunner()); + if (jobProperties.getOptions() == null) { + jobProperties.setOptions(new HashMap<>()); + } + Map jobOptions = jobProperties.getOptions(); switch (runner) { case DATAFLOW: - if (Strings.isNullOrEmpty(dfConfig.getLocation()) - || Strings.isNullOrEmpty(dfConfig.getProjectId())) { + if (Strings.isNullOrEmpty(jobOptions.getOrDefault("region", null)) + || Strings.isNullOrEmpty(jobOptions.getOrDefault("project", null))) { log.error("Project and location of the Dataflow runner is not configured"); throw new IllegalStateException( "Project and location of Dataflow runner must be specified for jobs to be run on Dataflow runner."); @@ -110,7 +85,7 @@ public JobManager getJobManager( credential); return new DataflowJobManager( - dataflow, dfConfig.getProjectId(), dfConfig.getLocation(), defaults); + dataflow, jobProperties.getOptions(), jobProperties.getMetrics()); } catch (IOException e) { throw new IllegalStateException( "Unable to find credential required for Dataflow monitoring API", e); @@ -119,42 +94,31 @@ public JobManager getJobManager( } catch (Exception e) { throw new IllegalStateException("Unable to initialize DataflowJobManager", e); } - case FLINK: - org.apache.flink.configuration.Configuration configuration = - GlobalConfiguration.loadConfiguration(flinkConfig.getConfigDir()); - List> customCommandLines = - CliFrontend.loadCustomCommandLines(configuration, flinkConfig.getConfigDir()); - CliFrontend flinkCli = new CliFrontend(configuration, customCommandLines); - FlinkRestApi flinkRestApi = - new FlinkRestApi(new RestTemplate(), flinkConfig.getMasterUrl()); - return new FlinkJobManager(flinkCli, flinkConfig, flinkRestApi, defaults); case DIRECT: - return new DirectRunnerJobManager(defaults); + return new DirectRunnerJobManager(jobProperties.getOptions(), directJobRegistry, + jobProperties.getMetrics()); default: - throw new IllegalArgumentException("Unsupported runner: " + runnerType); + throw new 
IllegalArgumentException("Unsupported runner: " + jobProperties.getRunner()); } } /** * Get a Job Monitor given the runner type and dataflow configuration. - * - * @param runnerType runner type: one of [DataflowRunner, DirectRunner, FlinkRunner] - * @param dfConfig dataflow job configuration - * @return JobMonitor */ @Bean public JobMonitor getJobMonitor( - @Value("${feast.jobs.runner}") String runnerType, - DataflowJobConfig dfConfig, - FlinkJobConfig flinkJobConfig) + FeastProperties feastProperties, + DirectJobRegistry directJobRegistry) throws Exception { - Runner runner = Runner.fromString(runnerType); + JobProperties jobProperties = feastProperties.getJobs(); + Runner runner = Runner.fromString(jobProperties.getRunner()); + Map jobOptions = jobProperties.getOptions(); switch (runner) { case DATAFLOW: - if (Strings.isNullOrEmpty(dfConfig.getLocation()) - || Strings.isNullOrEmpty(dfConfig.getProjectId())) { + if (Strings.isNullOrEmpty(jobOptions.getOrDefault("region", null)) + || Strings.isNullOrEmpty(jobOptions.getOrDefault("project", null))) { log.warn( "Project and location of the Dataflow runner is not configured, will not do job monitoring"); return new NoopJobMonitor(); @@ -168,7 +132,8 @@ public JobMonitor getJobMonitor( JacksonFactory.getDefaultInstance(), credential); - return new DataflowJobMonitor(dataflow, dfConfig.getProjectId(), dfConfig.getLocation()); + return new DataflowJobMonitor(dataflow, jobOptions.get("project"), + jobOptions.get("region")); } catch (IOException e) { log.error( "Unable to find credential required for Dataflow monitoring API: {}", e.getMessage()); @@ -177,24 +142,18 @@ public JobMonitor getJobMonitor( } catch (Exception e) { log.error("Unable to initialize DataflowJobMonitor", e); } - case FLINK: - FlinkRestApi flinkRestApi = - new FlinkRestApi(new RestTemplate(), flinkJobConfig.getMasterUrl()); - return new FlinkJobMonitor(flinkRestApi); case DIRECT: + return new DirectRunnerJobMonitor(directJobRegistry); default: return new NoopJobMonitor(); } } /** - * Get metrics pusher to statsd - * - * @param statsDClient - * @return StatsdMetricPusher + * Get a direct job registry */ @Bean - public StatsdMetricPusher getStatsdMetricPusher(StatsDClient statsDClient) { - return new StatsdMetricPusher(statsDClient); + public DirectJobRegistry directJobRegistry() { + return new DirectJobRegistry(); } } diff --git a/core/src/main/java/feast/core/config/ServerUtilConfig.java b/core/src/main/java/feast/core/config/ServerUtilConfig.java deleted file mode 100644 index b5eb7c1eff9..00000000000 --- a/core/src/main/java/feast/core/config/ServerUtilConfig.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
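The reworked `getJobManager`/`getJobMonitor` above read the runner and its options from `feast.jobs.*` instead of dedicated `@Value` fields, and the Dataflow branch insists on `project` and `region` keys. A hypothetical sketch of what that branch expects (all values are placeholders; the runner string follows the pre-refactor javadoc, which listed DataflowRunner and DirectRunner):

```java
import com.google.common.base.Strings;
import feast.core.job.Runner;
import java.util.HashMap;
import java.util.Map;

public class DataflowRunnerOptionsSketch {
  public static void main(String[] args) {
    // feast.jobs.runner
    Runner runner = Runner.fromString("DataflowRunner");
    // feast.jobs.options for the Dataflow branch of getJobManager()/getJobMonitor().
    Map<String, String> jobOptions = new HashMap<>();
    jobOptions.put("project", "my-gcp-project"); // placeholder GCP project id
    jobOptions.put("region", "us-central1");     // placeholder Dataflow region
    // Same guard as getJobManager(): both keys must be present and non-empty.
    boolean valid = !Strings.isNullOrEmpty(jobOptions.get("project"))
        && !Strings.isNullOrEmpty(jobOptions.get("region"));
    System.out.println(runner + " options valid: " + valid);
  }
}
```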
- * - */ - -package feast.core.config; - -import com.google.common.base.Charsets; -import com.google.common.collect.Lists; -import com.google.common.io.CharStreams; -import feast.core.config.StorageConfig.StorageSpecs; -import feast.core.dao.EntityInfoRepository; -import feast.core.dao.FeatureGroupInfoRepository; -import feast.core.dao.FeatureInfoRepository; -import feast.core.storage.BigQueryViewTemplater; -import feast.core.storage.SchemaManager; -import feast.core.validators.SpecValidator; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.core.io.ClassPathResource; -import org.springframework.core.io.Resource; - -/** - * Configuration providing utility objects for the core application. - */ -@Configuration -public class ServerUtilConfig { - - - @Autowired - private StorageSpecs storageSpecs; - - /** - * Get a BigQuery view templater. - * - * @return BigQueryViewTemplater - */ - @Bean - public BigQueryViewTemplater bigQueryViewTemplater() throws IOException { - Resource resource = new ClassPathResource("templates/bq_view.tmpl"); - InputStream resourceInputStream = resource.getInputStream(); - String tmpl = CharStreams.toString(new InputStreamReader(resourceInputStream, Charsets.UTF_8)); - return new BigQueryViewTemplater(tmpl); - } - - - /** - * Get the storage schema manager. - * - * @return SchemaManager - */ - @Bean - public SchemaManager schemaManager(BigQueryViewTemplater bigQueryViewTemplater) { - return new SchemaManager(bigQueryViewTemplater, storageSpecs); - - } - - /** - * Get a spec validator. - * - * @return SpecValidator - */ - @Bean - public SpecValidator specValidator( - EntityInfoRepository entityInfoRepository, - FeatureGroupInfoRepository featureGroupInfoRepository, - FeatureInfoRepository featureInfoRepository) { - SpecValidator specValidator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - return specValidator; - } -} diff --git a/core/src/main/java/feast/core/config/StorageConfig.java b/core/src/main/java/feast/core/config/StorageConfig.java deleted file mode 100644 index 8c9509c4582..00000000000 --- a/core/src/main/java/feast/core/config/StorageConfig.java +++ /dev/null @@ -1,83 +0,0 @@ -package feast.core.config; - -import static feast.core.util.TypeConversion.convertJsonStringToMap; - -import com.google.common.base.Strings; -import feast.core.validators.SpecValidator; -import feast.specs.StorageSpecProto.StorageSpec; -import java.util.Map; -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.Getter; -import lombok.NoArgsConstructor; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -@Configuration -@AllArgsConstructor -@NoArgsConstructor -public class StorageConfig { - - public static final String DEFAULT_SERVING_ID = "SERVING"; - public static final String DEFAULT_WAREHOUSE_ID = "WAREHOUSE"; - public static final String DEFAULT_ERRORS_ID = "ERRORS"; - - @Autowired - private SpecValidator validator; - - private StorageSpec buildStorageSpec( - String id, - String type, - String options) { - options = Strings.isNullOrEmpty(options) ? 
"{}" : options; - Map optionsMap = convertJsonStringToMap(options); - if (Strings.isNullOrEmpty(type)) { - return null; - } - StorageSpec storageSpec = StorageSpec.newBuilder() - .setId(id) - .setType(type) - .putAllOptions(optionsMap) - .build(); - switch (id) { - case DEFAULT_SERVING_ID: - validator.validateServingStorageSpec(storageSpec); - break; - case DEFAULT_WAREHOUSE_ID: - validator.validateWarehouseStorageSpec(storageSpec); - break; - case DEFAULT_ERRORS_ID: - validator.validateErrorsStorageSpec(storageSpec); - break; - } - return storageSpec; - } - - @Bean - public StorageSpecs getStorageSpecs( - @Value("${feast.store.serving.type}") String servingType, - @Value("${feast.store.serving.options}") String servingOptions, - @Value("${feast.store.warehouse.type}") String warehouseType, - @Value("${feast.store.warehouse.options}") String warehouseOptions, - @Value("${feast.store.errors.type}") String errorsType, - @Value("${feast.store.errors.options}") String errorsOptions) { - StorageSpecs storageSpecs = StorageSpecs.builder() - .servingStorageSpec(buildStorageSpec(DEFAULT_SERVING_ID, servingType, servingOptions)) - .warehouseStorageSpec( - buildStorageSpec(DEFAULT_WAREHOUSE_ID, warehouseType, warehouseOptions)) - .errorsStorageSpec(buildStorageSpec(DEFAULT_ERRORS_ID, errorsType, errorsOptions)) - .build(); - return storageSpecs; - } - - @Builder - @Getter - public static class StorageSpecs { - - private StorageSpec servingStorageSpec; - private StorageSpec warehouseStorageSpec; - private StorageSpec errorsStorageSpec; - } -} diff --git a/core/src/main/java/feast/core/config/TrainingConfig.java b/core/src/main/java/feast/core/config/TrainingConfig.java deleted file mode 100644 index 9bcff052f1b..00000000000 --- a/core/src/main/java/feast/core/config/TrainingConfig.java +++ /dev/null @@ -1,42 +0,0 @@ -package feast.core.config; - -import com.google.common.base.Charsets; -import com.google.common.io.CharStreams; -import com.hubspot.jinjava.Jinjava; -import feast.core.config.StorageConfig.StorageSpecs; -import feast.core.dao.FeatureInfoRepository; -import feast.core.training.BigQueryDatasetTemplater; -import feast.core.training.BigQueryTraningDatasetCreator; -import feast.core.util.RandomUuidProvider; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.core.io.ClassPathResource; -import org.springframework.core.io.Resource; - -/** Configuration related to training API */ -@Configuration -public class TrainingConfig { - - @Bean - public BigQueryDatasetTemplater getBigQueryTrainingDatasetTemplater( - StorageSpecs storageSpecs, FeatureInfoRepository featureInfoRepository) throws IOException { - Resource resource = new ClassPathResource("templates/bq_training.tmpl"); - InputStream resourceInputStream = resource.getInputStream(); - String tmpl = CharStreams.toString(new InputStreamReader(resourceInputStream, Charsets.UTF_8)); - return new BigQueryDatasetTemplater( - new Jinjava(), tmpl, storageSpecs.getWarehouseStorageSpec(), featureInfoRepository); - } - - @Bean - public BigQueryTraningDatasetCreator getBigQueryTrainingDatasetCreator( - BigQueryDatasetTemplater templater, - @Value("${feast.core.projectId}") String projectId, - @Value("${feast.core.datasetPrefix}") String datasetPrefix) { - return new BigQueryTraningDatasetCreator( - templater, 
projectId, datasetPrefix, new RandomUuidProvider()); - } -} diff --git a/core/src/main/java/feast/core/dao/EntityInfoRepository.java b/core/src/main/java/feast/core/dao/EntityInfoRepository.java deleted file mode 100644 index 6a99ae2bf4e..00000000000 --- a/core/src/main/java/feast/core/dao/EntityInfoRepository.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.dao; - -import feast.core.model.EntityInfo; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.stereotype.Repository; - -/** JPA repository supplying EntityInfo objects keyed by name. */ -@Repository -public interface EntityInfoRepository extends JpaRepository {} diff --git a/core/src/main/java/feast/core/dao/FeatureGroupInfoRepository.java b/core/src/main/java/feast/core/dao/FeatureGroupInfoRepository.java deleted file mode 100644 index 27a3ac5663b..00000000000 --- a/core/src/main/java/feast/core/dao/FeatureGroupInfoRepository.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.dao; - -import feast.core.model.FeatureGroupInfo; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.stereotype.Repository; - -/** JPA repository supplying FeatureGroupInfo objects keyed by ID. */ -@Repository -public interface FeatureGroupInfoRepository extends JpaRepository {} diff --git a/core/src/main/java/feast/core/dao/FeatureInfoRepository.java b/core/src/main/java/feast/core/dao/FeatureInfoRepository.java deleted file mode 100644 index 4738b4a5f49..00000000000 --- a/core/src/main/java/feast/core/dao/FeatureInfoRepository.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.core.dao; - -import feast.core.model.FeatureInfo; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.stereotype.Repository; - -/** JPA repository supplying FeatureInfo objects keyed by ID. */ -@Repository -public interface FeatureInfoRepository extends JpaRepository {} diff --git a/core/src/main/java/feast/core/dao/FeatureSetRepository.java b/core/src/main/java/feast/core/dao/FeatureSetRepository.java new file mode 100644 index 00000000000..cf94bbb3c18 --- /dev/null +++ b/core/src/main/java/feast/core/dao/FeatureSetRepository.java @@ -0,0 +1,16 @@ +package feast.core.dao; + +import feast.core.model.FeatureSet; +import java.util.List; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.Query; + +/** JPA repository supplying FeatureSet objects keyed by id. */ +public interface FeatureSetRepository extends JpaRepository { + // find all versions of featureSets matching the given name. + List findByName(String name); + + // find all versions of featureSets with names matching the regex + @Query(nativeQuery=true, value="SELECT * FROM feature_sets WHERE name LIKE ?1") + List findByNameWithWildcard(String name); +} diff --git a/core/src/main/java/feast/core/dao/JobInfoRepository.java b/core/src/main/java/feast/core/dao/JobInfoRepository.java index 446772a5e78..06381aa5577 100644 --- a/core/src/main/java/feast/core/dao/JobInfoRepository.java +++ b/core/src/main/java/feast/core/dao/JobInfoRepository.java @@ -28,4 +28,5 @@ @Repository public interface JobInfoRepository extends JpaRepository { List findByStatusNotIn(Collection statuses); + List findBySourceIdAndStoreName(String sourceId, String storeName); } \ No newline at end of file diff --git a/core/src/main/java/feast/core/dao/StoreRepository.java b/core/src/main/java/feast/core/dao/StoreRepository.java new file mode 100644 index 00000000000..7df7af3ad78 --- /dev/null +++ b/core/src/main/java/feast/core/dao/StoreRepository.java @@ -0,0 +1,9 @@ +package feast.core.dao; + +import feast.core.model.FeatureSet; +import feast.core.model.Store; +import org.springframework.data.jpa.repository.JpaRepository; + +/** JPA repository supplying Store objects keyed by id. */ +public interface StoreRepository extends JpaRepository { +} diff --git a/core/src/main/java/feast/core/storage/StorageInitializationException.java b/core/src/main/java/feast/core/exception/TopicExistsException.java similarity index 66% rename from core/src/main/java/feast/core/storage/StorageInitializationException.java rename to core/src/main/java/feast/core/exception/TopicExistsException.java index 21f453158c5..9d6ffe1c3c7 100644 --- a/core/src/main/java/feast/core/storage/StorageInitializationException.java +++ b/core/src/main/java/feast/core/exception/TopicExistsException.java @@ -15,18 +15,21 @@ * */ -package feast.core.storage; +package feast.core.exception; -public class StorageInitializationException extends RuntimeException { - public StorageInitializationException() { +/** + * Exception thrown when creation of a topic in the stream fails because it already exists. 
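The new `FeatureSetRepository` above exposes a native `LIKE` query alongside the derived `findByName`. A hypothetical usage sketch from inside a Spring-managed service (the feature set names are placeholders, and the return types are assumed to be `List<FeatureSet>` since generic parameters were stripped in this rendering):

```java
import feast.core.dao.FeatureSetRepository;
import feast.core.model.FeatureSet;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

@Service
public class FeatureSetLookup {

  @Autowired
  private FeatureSetRepository featureSetRepository;

  public void lookup() {
    // All versions of a single feature set, by exact name.
    List<FeatureSet> versions = featureSetRepository.findByName("driver_features");
    // findByNameWithWildcard maps to SQL LIKE, so '%' acts as the wildcard.
    List<FeatureSet> matching = featureSetRepository.findByNameWithWildcard("driver%");
    System.out.println(versions.size() + " / " + matching.size());
  }
}
```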
+ */ +public class TopicExistsException extends RuntimeException { + public TopicExistsException() { super(); } - public StorageInitializationException(String message) { + public TopicExistsException(String message) { super(message); } - public StorageInitializationException(String message, Throwable cause) { + public TopicExistsException(String message, Throwable cause) { super(message, cause); } } diff --git a/core/src/main/java/feast/core/exception/TrainingDatasetCreationException.java b/core/src/main/java/feast/core/exception/TrainingDatasetCreationException.java deleted file mode 100644 index 2d5bc90a997..00000000000 --- a/core/src/main/java/feast/core/exception/TrainingDatasetCreationException.java +++ /dev/null @@ -1,18 +0,0 @@ -package feast.core.exception; - -/** - * Exception that happens when creation of training dataset failed. - */ -public class TrainingDatasetCreationException extends RuntimeException { - public TrainingDatasetCreationException() { - super(); - } - - public TrainingDatasetCreationException(String message) { - super(message); - } - - public TrainingDatasetCreationException(String message, Throwable cause) { - super(message, cause); - } -} diff --git a/core/src/main/java/feast/core/grpc/CoreServiceImpl.java b/core/src/main/java/feast/core/grpc/CoreServiceImpl.java index 62a66801cd1..81a77a6f45f 100644 --- a/core/src/main/java/feast/core/grpc/CoreServiceImpl.java +++ b/core/src/main/java/feast/core/grpc/CoreServiceImpl.java @@ -17,37 +17,40 @@ package feast.core.grpc; -import com.google.protobuf.Empty; -import com.timgroup.statsd.StatsDClient; +import com.google.common.collect.Lists; +import com.google.protobuf.InvalidProtocolBufferException; import feast.core.CoreServiceGrpc.CoreServiceImplBase; -import feast.core.CoreServiceProto.CoreServiceTypes.ApplyEntityResponse; -import feast.core.CoreServiceProto.CoreServiceTypes.ApplyFeatureGroupResponse; -import feast.core.CoreServiceProto.CoreServiceTypes.ApplyFeatureResponse; -import feast.core.CoreServiceProto.CoreServiceTypes.GetEntitiesRequest; -import feast.core.CoreServiceProto.CoreServiceTypes.GetEntitiesResponse; -import feast.core.CoreServiceProto.CoreServiceTypes.GetFeaturesRequest; -import feast.core.CoreServiceProto.CoreServiceTypes.GetFeaturesResponse; -import feast.core.CoreServiceProto.CoreServiceTypes.ListEntitiesResponse; -import feast.core.CoreServiceProto.CoreServiceTypes.ListFeaturesResponse; -import feast.core.config.StorageConfig.StorageSpecs; -import feast.core.exception.RegistrationException; +import feast.core.CoreServiceProto.ApplyFeatureSetRequest; +import feast.core.CoreServiceProto.ApplyFeatureSetResponse; +import feast.core.CoreServiceProto.GetFeastCoreVersionRequest; +import feast.core.CoreServiceProto.GetFeastCoreVersionResponse; +import feast.core.CoreServiceProto.GetFeatureSetRequest; +import feast.core.CoreServiceProto.GetFeatureSetResponse; +import feast.core.CoreServiceProto.ListFeatureSetsRequest; +import feast.core.CoreServiceProto.ListFeatureSetsResponse; +import feast.core.CoreServiceProto.ListStoresRequest; +import feast.core.CoreServiceProto.ListStoresRequest.Filter; +import feast.core.CoreServiceProto.ListStoresResponse; +import feast.core.CoreServiceProto.UpdateStoreRequest; +import feast.core.CoreServiceProto.UpdateStoreResponse; +import feast.core.CoreServiceProto.UpdateStoreResponse.Status; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.SourceProto; +import feast.core.StoreProto.Store; +import feast.core.StoreProto.Store.Subscription; import 
feast.core.exception.RetrievalException; -import feast.core.model.EntityInfo; -import feast.core.model.FeatureGroupInfo; -import feast.core.model.FeatureInfo; +import feast.core.service.JobCoordinatorService; import feast.core.service.SpecService; -import feast.core.validators.SpecValidator; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureGroupSpecProto; -import feast.specs.FeatureSpecProto.FeatureSpec; -import io.grpc.Status; -import io.grpc.StatusRuntimeException; import io.grpc.stub.StreamObserver; +import java.util.HashSet; import java.util.List; +import java.util.Set; +import java.util.regex.Pattern; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.lognet.springboot.grpc.GRpcService; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.transaction.annotation.Transactional; /** * Implementation of the feast core GRPC service. @@ -58,216 +61,142 @@ public class CoreServiceImpl extends CoreServiceImplBase { @Autowired private SpecService specService; - - @Autowired - private SpecValidator validator; - - @Autowired - private StatsDClient statsDClient; - @Autowired - private StorageSpecs storageSpecs; + private JobCoordinatorService jobCoordinatorService; - /** - * Gets specs for all entities requested in the request. If the retrieval of any one of them - * fails, the whole request will fail, giving an internal error. - */ @Override - public void getEntities( - GetEntitiesRequest request, StreamObserver responseObserver) { - long now = System.currentTimeMillis(); - statsDClient.increment("get_entities_request_count"); - try { - List entitySpecs = - specService - .getEntities(request.getIdsList()) - .stream() - .map(EntityInfo::getEntitySpec) - .collect(Collectors.toList()); - GetEntitiesResponse response = - GetEntitiesResponse.newBuilder().addAllEntities(entitySpecs).build(); - responseObserver.onNext(response); - responseObserver.onCompleted(); - statsDClient.increment("get_entities_request_success"); - } catch (RetrievalException | IllegalArgumentException e) { - statsDClient.increment("get_entities_request_failed"); - log.error("Error in getEntities: {}", e); - responseObserver.onError(getRuntimeException(e)); - } finally { - long duration = System.currentTimeMillis() - now; - statsDClient.gauge("get_entities_latency_ms", duration); - } + public void getFeastCoreVersion( + GetFeastCoreVersionRequest request, + StreamObserver responseObserver) { + super.getFeastCoreVersion(request, responseObserver); } - /** - * Gets specs for all entities registered in the registry. 
- */ @Override - public void listEntities(Empty request, StreamObserver responseObserver) { - long now = System.currentTimeMillis(); - statsDClient.increment("list_entities_request_count"); + @Transactional + public void getFeatureSet( + GetFeatureSetRequest request, StreamObserver responseObserver) { try { - List entitySpecs = - specService - .listEntities() - .stream() - .map(EntityInfo::getEntitySpec) - .collect(Collectors.toList()); - ListEntitiesResponse response = - ListEntitiesResponse.newBuilder().addAllEntities(entitySpecs).build(); + GetFeatureSetResponse response = specService.getFeatureSet(request); responseObserver.onNext(response); responseObserver.onCompleted(); - statsDClient.increment("list_entities_request_success"); - } catch (RetrievalException e) { - statsDClient.increment("list_entities_request_failed"); - log.error("Error in listEntities: {}", e); - responseObserver.onError(getRuntimeException(e)); - } finally { - long duration = System.currentTimeMillis() - now; - statsDClient.gauge("list_entities_latency_ms", duration); + } catch (RetrievalException | InvalidProtocolBufferException e) { + log.error("Exception has occurred in GetFeatureSet method: ", e); + responseObserver.onError(e); } } - /** - * Gets specs for all features requested in the request. If the retrieval of any one of them - * fails, the whole request will fail, giving an internal error. - */ @Override - public void getFeatures( - GetFeaturesRequest request, StreamObserver responseObserver) { - long now = System.currentTimeMillis(); - statsDClient.increment("get_features_request_count"); + @Transactional + public void listFeatureSets( + ListFeatureSetsRequest request, StreamObserver responseObserver) { try { - List featureSpecs = - specService - .getFeatures(request.getIdsList()) - .stream() - .map(FeatureInfo::getFeatureSpec) - .collect(Collectors.toList()); - GetFeaturesResponse response = - GetFeaturesResponse.newBuilder().addAllFeatures(featureSpecs).build(); + ListFeatureSetsResponse response = specService.listFeatureSets(request.getFilter()); responseObserver.onNext(response); responseObserver.onCompleted(); - statsDClient.increment("get_features_request_success"); - } catch (RetrievalException | IllegalArgumentException e) { - statsDClient.increment("get_features_request_failed"); - log.error("Error in getFeatures: {}", e); - responseObserver.onError(getRuntimeException(e)); - } finally { - long duration = System.currentTimeMillis() - now; - statsDClient.gauge("get_features_latency_ms", duration); + } catch (RetrievalException | InvalidProtocolBufferException e) { + log.error("Exception has occurred in ListFeatureSet method: ", e); + responseObserver.onError(e); } } - /** - * Gets specs for all features registered in the registry. 
TODO: some kind of pagination - */ @Override - public void listFeatures(Empty request, StreamObserver responseObserver) { - long now = System.currentTimeMillis(); - statsDClient.increment("list_features_request_count"); + @Transactional + public void listStores( + ListStoresRequest request, StreamObserver responseObserver) { try { - List featureSpecs = - specService - .listFeatures() - .stream() - .map(FeatureInfo::getFeatureSpec) - .collect(Collectors.toList()); - ListFeaturesResponse response = - ListFeaturesResponse.newBuilder().addAllFeatures(featureSpecs).build(); + ListStoresResponse response = specService.listStores(request.getFilter()); responseObserver.onNext(response); responseObserver.onCompleted(); - statsDClient.increment("list_features_request_success"); } catch (RetrievalException e) { - statsDClient.increment("list_features_request_failed"); - log.error("Error in listFeatures: {}", e); - responseObserver.onError(getRuntimeException(e)); - } finally { - long duration = System.currentTimeMillis() - now; - statsDClient.gauge("list_features_latency_ms", duration); + log.error("Exception has occurred in ListStores method: ", e); + responseObserver.onError(e); } } - /** - * Registers a single feature spec to the registry. If validation fails, will returns a bad - * request error. If registration fails (e.g. connection to the db is interrupted), an internal - * error will be returned. - */ @Override - public void applyFeature( - FeatureSpec request, StreamObserver responseObserver) { + @Transactional + public void applyFeatureSet( + ApplyFeatureSetRequest request, StreamObserver responseObserver) { try { - validator.validateFeatureSpec(request); - FeatureInfo feature = specService.applyFeature(request); - ApplyFeatureResponse response = - ApplyFeatureResponse.newBuilder().setFeatureId(feature.getId()).build(); + ApplyFeatureSetResponse response = specService.applyFeatureSet(request.getFeatureSet()); + String featureSetName = response.getFeatureSet().getName(); + ListStoresResponse stores = specService.listStores(Filter.newBuilder().build()); + for (Store store : stores.getStoreList()) { + List relevantSubscriptions = + store.getSubscriptionsList().stream() + .filter( + sub -> { + String subString = sub.getName(); + if (!subString.contains(".*")) + { + subString = subString.replace("*", ".*"); + } + Pattern p = Pattern.compile(subString); + return p.matcher(featureSetName).matches(); + }) + .collect(Collectors.toList()); + Set featureSetSpecs = new HashSet<>(); + for (Subscription subscription : relevantSubscriptions) { + featureSetSpecs.addAll( + specService + .listFeatureSets( + ListFeatureSetsRequest.Filter.newBuilder() + .setFeatureSetName(subscription.getName()) + .setFeatureSetVersion(subscription.getVersion()) + .build()) + .getFeatureSetsList()); + } + if (!featureSetSpecs.isEmpty() && featureSetSpecs.contains(response.getFeatureSet())) { + // We use the request featureSet source because it contains the information + // about whether to default to the default feature stream or not + SourceProto.Source source = response.getFeatureSet().getSource(); + jobCoordinatorService + .startOrUpdateJob(Lists.newArrayList(featureSetSpecs), source, store); + } + } responseObserver.onNext(response); responseObserver.onCompleted(); - } catch (RegistrationException e) { - log.error("Error in applyFeature: {}", e); - responseObserver.onError(getRuntimeException(e)); - } catch (IllegalArgumentException e) { - log.error("Error in applyFeature: {}", e); - 
responseObserver.onError(getBadRequestException(e)); + } catch (Exception e) { + log.error("Exception has occurred in ApplyFeatureSet method: ", e); + responseObserver.onError(e); } } - /** - * Registers a single feature group spec to the registry. If validation fails, will returns a bad - * request error. If registration fails (e.g. connection to the db is interrupted), an internal - * error will be returned. - */ @Override - public void applyFeatureGroup( - FeatureGroupSpecProto.FeatureGroupSpec request, - StreamObserver responseObserver) { + @Transactional + public void updateStore(UpdateStoreRequest request, + StreamObserver responseObserver) { try { - validator.validateFeatureGroupSpec(request); - FeatureGroupInfo featureGroup = specService.applyFeatureGroup(request); - ApplyFeatureGroupResponse response = - ApplyFeatureGroupResponse.newBuilder().setFeatureGroupId(featureGroup.getId()).build(); + UpdateStoreResponse response = specService.updateStore(request); responseObserver.onNext(response); responseObserver.onCompleted(); - } catch (RegistrationException e) { - log.error("Error in applyFeatureGroup: {}", e); - responseObserver.onError(getRuntimeException(e)); - } catch (IllegalArgumentException e) { - log.error("Error in applyFeatureGroup: {}", e); - responseObserver.onError(getBadRequestException(e)); - } - } - /** - * Registers a single entity spec to the registry. If validation fails, will returns a bad request - * error. If registration fails (e.g. connection to the db is interrupted), an internal error will - * be returned. - */ - @Override - public void applyEntity( - EntitySpec request, StreamObserver responseObserver) { - try { - validator.validateEntitySpec(request); - EntityInfo entity = specService.applyEntity(request); - ApplyEntityResponse response = - ApplyEntityResponse.newBuilder().setEntityName(entity.getName()).build(); - responseObserver.onNext(response); - responseObserver.onCompleted(); - } catch (RegistrationException e) { - log.error("Error in applyEntity: {}", e); - responseObserver.onError(getRuntimeException(e)); - } catch (IllegalArgumentException e) { - log.error("Error in applyEntity: {}", e); - responseObserver.onError(getBadRequestException(e)); + if (!response.getStatus().equals(Status.NO_CHANGE)) { + Set featureSetSpecs = new HashSet<>(); + Store store = response.getStore(); + for (Subscription subscription : store.getSubscriptionsList()) { + featureSetSpecs.addAll( + specService.listFeatureSets( + ListFeatureSetsRequest.Filter.newBuilder() + .setFeatureSetName(subscription.getName()) + .setFeatureSetVersion(subscription.getVersion()) + .build()) + .getFeatureSetsList() + ); + } + if (featureSetSpecs.size() == 0) { + return; + } + featureSetSpecs.stream() + .collect(Collectors.groupingBy(FeatureSetSpec::getSource)) + .entrySet() + .stream() + .forEach( + kv -> jobCoordinatorService.startOrUpdateJob(kv.getValue(), kv.getKey(), store)); + } + } catch (Exception e) { + log.error("Exception has occurred in UpdateStore method: ", e); + responseObserver.onError(e); } } - - private StatusRuntimeException getRuntimeException(Exception e) { - return new StatusRuntimeException( - Status.fromCode(Status.Code.INTERNAL).withDescription(e.getMessage()).withCause(e)); - } - - private StatusRuntimeException getBadRequestException(Exception e) { - return new StatusRuntimeException( - Status.fromCode(Status.Code.OUT_OF_RANGE).withDescription(e.getMessage()).withCause(e)); - } } diff --git a/core/src/main/java/feast/core/grpc/DatasetServiceImpl.java 
b/core/src/main/java/feast/core/grpc/DatasetServiceImpl.java deleted file mode 100644 index a4211de9726..00000000000 --- a/core/src/main/java/feast/core/grpc/DatasetServiceImpl.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ -package feast.core.grpc; - -import com.google.common.base.Strings; -import com.google.protobuf.Timestamp; -import feast.core.DatasetServiceGrpc.DatasetServiceImplBase; -import feast.core.DatasetServiceProto.DatasetInfo; -import feast.core.DatasetServiceProto.FeatureSet; -import feast.core.DatasetServiceProto.DatasetServiceTypes.CreateDatasetRequest; -import feast.core.DatasetServiceProto.DatasetServiceTypes.CreateDatasetResponse; -import feast.core.training.BigQueryTraningDatasetCreator; -import io.grpc.Status; -import io.grpc.Status.Code; -import io.grpc.stub.StreamObserver; -import java.time.Instant; -import java.time.temporal.ChronoUnit; -import lombok.extern.slf4j.Slf4j; -import org.lognet.springboot.grpc.GRpcService; -import org.springframework.beans.factory.annotation.Autowired; - -@Slf4j -@GRpcService -public class DatasetServiceImpl extends DatasetServiceImplBase { - - private final BigQueryTraningDatasetCreator datasetCreator; - - @Autowired - public DatasetServiceImpl(BigQueryTraningDatasetCreator DatasetCreator) { - this.datasetCreator = DatasetCreator; - } - - @Override - public void createDataset( - CreateDatasetRequest request, - StreamObserver responseObserver) { - try { - checkRequest(request); - } catch (IllegalArgumentException e) { - responseObserver.onError( - Status.fromCode(Code.INVALID_ARGUMENT) - .withCause(e) - .withDescription(e.getMessage()) - .asException()); - return; - } - - try { - DatasetInfo datasetInfo = - datasetCreator.createDataset( - request.getFeatureSet(), - request.getStartDate(), - request.getEndDate(), - request.getLimit(), - request.getNamePrefix(), - request.getFiltersMap()); - CreateDatasetResponse response = - CreateDatasetResponse.newBuilder().setDatasetInfo(datasetInfo).build(); - - responseObserver.onNext(response); - responseObserver.onCompleted(); - } catch (Exception e) { - log.error("Training dataset creation failed", e); - responseObserver.onError( - Status.fromCode(Code.INTERNAL) - .withCause(e) - .withDescription("Training dataset creation failed: " + e.getMessage()) - .asException()); - } - } - - private void checkRequest(CreateDatasetRequest request) { - FeatureSet featureSet = request.getFeatureSet(); - Timestamp startDate = request.getStartDate(); - Timestamp endDate = request.getEndDate(); - - checkHasSameEntity(featureSet); - checkStartIsBeforeEnd(startDate, endDate); - } - - private void checkStartIsBeforeEnd(Timestamp startDate, Timestamp endDate) { - Instant start = Instant.ofEpochSecond(startDate.getSeconds()).truncatedTo(ChronoUnit.DAYS); - Instant end = Instant.ofEpochSecond(endDate.getSeconds()).truncatedTo(ChronoUnit.DAYS); - - if (start.compareTo(end) > 0) { - throw new 
IllegalArgumentException("startDate is after endDate"); - } - } - - private void checkHasSameEntity(FeatureSet featureSet) { - String entityName = featureSet.getEntityName(); - if (Strings.isNullOrEmpty(entityName)) { - throw new IllegalArgumentException("entity name in feature set is null or empty"); - } - - if (featureSet.getFeatureIdsCount() < 1) { - throw new IllegalArgumentException("feature set is empty"); - } - - for (String featureId : featureSet.getFeatureIdsList()) { - String entity = featureId.split("\\.")[0]; - if (!entityName.equals(entity)) { - throw new IllegalArgumentException("feature set contains different entity name: " + entity); - } - } - } -} diff --git a/core/src/main/java/feast/core/grpc/JobServiceImpl.java b/core/src/main/java/feast/core/grpc/JobServiceImpl.java deleted file mode 100644 index 47b76061b09..00000000000 --- a/core/src/main/java/feast/core/grpc/JobServiceImpl.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.grpc; - -import com.google.protobuf.Empty; -import feast.core.JobServiceGrpc; -import feast.core.JobServiceProto.JobServiceTypes.AbortJobRequest; -import feast.core.JobServiceProto.JobServiceTypes.AbortJobResponse; -import feast.core.JobServiceProto.JobServiceTypes.GetJobRequest; -import feast.core.JobServiceProto.JobServiceTypes.GetJobResponse; -import feast.core.JobServiceProto.JobServiceTypes.JobDetail; -import feast.core.JobServiceProto.JobServiceTypes.ListJobsResponse; -import feast.core.JobServiceProto.JobServiceTypes.SubmitImportJobRequest; -import feast.core.JobServiceProto.JobServiceTypes.SubmitImportJobResponse; -import feast.core.exception.JobExecutionException; -import feast.core.service.JobManagementService; -import feast.core.validators.SpecValidator; -import io.grpc.Status; -import io.grpc.StatusRuntimeException; -import io.grpc.stub.StreamObserver; -import java.util.List; -import lombok.extern.slf4j.Slf4j; -import org.lognet.springboot.grpc.GRpcService; -import org.springframework.beans.factory.annotation.Autowired; - -/** Implementation of the feast job GRPC service. */ -@Slf4j -@GRpcService -public class JobServiceImpl extends JobServiceGrpc.JobServiceImplBase { - @Autowired private JobManagementService jobManagementService; - - @Autowired private SpecValidator validator; - - /** - * submit a job to the runner by providing an import spec. 
- * - * @param request ImportJobRequest object containing an import spec - * @param responseObserver - */ - @Override - public void submitJob( - SubmitImportJobRequest request, StreamObserver responseObserver) { - try { - validator.validateImportSpec(request.getImportSpec()); - String jobID = jobManagementService.submitJob(request.getImportSpec(), request.getName()); - SubmitImportJobResponse response = - SubmitImportJobResponse.newBuilder().setJobId(jobID).build(); - responseObserver.onNext(response); - responseObserver.onCompleted(); - } catch (IllegalArgumentException e) { - log.error("Error in submitJob: {}", e); - responseObserver.onError(getBadRequestException(e)); - } catch (JobExecutionException e) { - log.error("Error in submitJob: {}", e); - responseObserver.onError(getRuntimeException(e)); - } - } - - /** - * Abort a job given its feast-internal job id - * - * @param request AbortJobRequest object containing feast job id - * @param responseObserver - */ - @Override - public void abortJob(AbortJobRequest request, StreamObserver responseObserver) { - try { - jobManagementService.abortJob(request.getId()); - AbortJobResponse response = AbortJobResponse.newBuilder().setId(request.getId()).build(); - responseObserver.onNext(response); - responseObserver.onCompleted(); - } catch (Exception e) { - log.error("Error aborting job with id {}: {}", request.getId(), e); - responseObserver.onError(getRuntimeException(e)); - } - } - - /** - * List all jobs previously submitted to the system. - * - * @param request Empty request - * @param responseObserver - */ - @Override - public void listJobs(Empty request, StreamObserver responseObserver) { - try { - List jobs = jobManagementService.listJobs(); - ListJobsResponse response = ListJobsResponse.newBuilder().addAllJobs(jobs).build(); - responseObserver.onNext(response); - responseObserver.onCompleted(); - } catch (Exception e) { - log.error("Error listing jobs: {}", e); - responseObserver.onError(getRuntimeException(e)); - } - } - - /** - * Get a single job previously submitted to the system by id - * - * @param request GetJobRequest object containing a feast-internal job id - * @param responseObserver - */ - @Override - public void getJob(GetJobRequest request, StreamObserver responseObserver) { - try { - JobDetail job = jobManagementService.getJob(request.getId()); - GetJobResponse response = GetJobResponse.newBuilder().setJob(job).build(); - responseObserver.onNext(response); - responseObserver.onCompleted(); - } catch (Exception e) { - log.error("Error getting job id {}: {}", request.getId(), e); - responseObserver.onError(getRuntimeException(e)); - } - } - - private StatusRuntimeException getRuntimeException(Exception e) { - return new StatusRuntimeException( - Status.fromCode(Status.Code.INTERNAL).withDescription(e.getMessage()).withCause(e)); - } - - private StatusRuntimeException getBadRequestException(Exception e) { - return new StatusRuntimeException( - Status.fromCode(Status.Code.OUT_OF_RANGE).withDescription(e.getMessage()).withCause(e)); - } -} diff --git a/core/src/main/java/feast/core/grpc/UIServiceImpl.java b/core/src/main/java/feast/core/grpc/UIServiceImpl.java deleted file mode 100644 index 8bacfbd8a7a..00000000000 --- a/core/src/main/java/feast/core/grpc/UIServiceImpl.java +++ /dev/null @@ -1,243 +0,0 @@ -package feast.core.grpc; - -import static io.grpc.Status.Code.INTERNAL; -import static io.grpc.Status.Code.INVALID_ARGUMENT; - -import com.google.protobuf.Empty; -import feast.core.UIServiceGrpc.UIServiceImplBase; -import 
feast.core.UIServiceProto.UIServiceTypes.EntityDetail; -import feast.core.UIServiceProto.UIServiceTypes.FeatureDetail; -import feast.core.UIServiceProto.UIServiceTypes.FeatureGroupDetail; -import feast.core.UIServiceProto.UIServiceTypes.GetEntityRequest; -import feast.core.UIServiceProto.UIServiceTypes.GetEntityResponse; -import feast.core.UIServiceProto.UIServiceTypes.GetFeatureGroupRequest; -import feast.core.UIServiceProto.UIServiceTypes.GetFeatureGroupResponse; -import feast.core.UIServiceProto.UIServiceTypes.GetFeatureRequest; -import feast.core.UIServiceProto.UIServiceTypes.GetFeatureResponse; -import feast.core.UIServiceProto.UIServiceTypes.GetStorageRequest; -import feast.core.UIServiceProto.UIServiceTypes.GetStorageResponse; -import feast.core.UIServiceProto.UIServiceTypes.ListEntitiesResponse; -import feast.core.UIServiceProto.UIServiceTypes.ListFeatureGroupsResponse; -import feast.core.UIServiceProto.UIServiceTypes.ListFeaturesResponse; -import feast.core.UIServiceProto.UIServiceTypes.ListStorageResponse; -import feast.core.UIServiceProto.UIServiceTypes.StorageDetail; -import feast.core.model.EntityInfo; -import feast.core.model.FeatureGroupInfo; -import feast.core.model.FeatureInfo; -import feast.core.model.StorageInfo; -import feast.core.service.SpecService; -import io.grpc.Status; -import io.grpc.Status.Code; -import io.grpc.stub.StreamObserver; -import java.util.Collections; -import java.util.List; -import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; -import org.lognet.springboot.grpc.GRpcService; -import org.springframework.beans.factory.annotation.Autowired; - -/** - * GRPC Service exposing detailed information of feast's resources. - */ -@Slf4j -@GRpcService -public class UIServiceImpl extends UIServiceImplBase { - - private final SpecService specService; - - @Autowired - public UIServiceImpl(SpecService specService) { - this.specService = specService; - } - - @Override - public void getEntity(GetEntityRequest request, - StreamObserver responseObserver) { - String entityName = request.getId(); - - try { - List entityInfos = specService.getEntities(Collections.singletonList(entityName)); - EntityDetail entityDetail = entityInfos.get(0) - .getEntityDetail(); - - GetEntityResponse response = GetEntityResponse.newBuilder() - .setEntity(entityDetail) - .build(); - - responseObserver.onNext(response); - responseObserver.onCompleted(); - } catch (IllegalArgumentException e) { - String errMsg = "Invalid entity name: " + entityName; - log.error(errMsg, e); - onError(responseObserver, INVALID_ARGUMENT, errMsg, e); - } catch (Exception e) { - String errMsg = "Error while retrieving entity with name: " + entityName; - log.error(errMsg, e); - onError(responseObserver, INTERNAL, errMsg, e); - } - } - - @Override - public void listEntities(Empty request, StreamObserver responseObserver) { - try { - List entityInfos = specService.listEntities(); - List entityDetails = entityInfos.stream() - .map(EntityInfo::getEntityDetail) - .collect(Collectors.toList()); - ListEntitiesResponse response = ListEntitiesResponse.newBuilder() - .addAllEntities(entityDetails) - .build(); - - responseObserver.onNext(response); - responseObserver.onCompleted(); - } catch (Exception e) { - String errMsg = "Error while getting all entities"; - log.error(errMsg, e); - onError(responseObserver, INTERNAL, errMsg, e); - } - } - - @Override - public void getFeature(GetFeatureRequest request, - StreamObserver responseObserver) { - String featureId = request.getId(); - try { - List featureInfos = 
specService - .getFeatures(Collections.singletonList(featureId)); - FeatureDetail featureDetail = featureInfos.get(0) - .getFeatureDetail(specService.getStorageSpecs()); - - GetFeatureResponse resp = GetFeatureResponse.newBuilder() - .setFeature(featureDetail) - .build(); - responseObserver.onNext(resp); - responseObserver.onCompleted(); - } catch (IllegalArgumentException e) { - String errMsg = "Invalid feature ID: " + featureId; - log.error(errMsg); - onError(responseObserver, INVALID_ARGUMENT, errMsg, e); - } catch (Exception e) { - String errMsg = "Error while retrieving feature with ID: " + featureId; - log.error(errMsg, e); - onError(responseObserver, INTERNAL, errMsg, e); - } - } - - @Override - public void listFeatures(Empty request, StreamObserver responseObserver) { - try { - List featureDetails = specService.listFeatures() - .stream() - .map((fi) -> fi.getFeatureDetail(specService.getStorageSpecs())) - .collect(Collectors.toList()); - - ListFeaturesResponse resp = ListFeaturesResponse.newBuilder() - .addAllFeatures(featureDetails) - .build(); - responseObserver.onNext(resp); - responseObserver.onCompleted(); - } catch (Exception e) { - String errMsg = "Error while getting all features"; - log.error(errMsg, e); - onError(responseObserver, INTERNAL, errMsg, e); - } - } - - @Override - public void getFeatureGroup(GetFeatureGroupRequest request, - StreamObserver responseObserver) { - String featureGroupId = request.getId(); - try { - List featureGroupInfos = specService - .getFeatureGroups(Collections.singletonList(featureGroupId)); - - GetFeatureGroupResponse resp = GetFeatureGroupResponse.newBuilder() - .setFeatureGroup(featureGroupInfos.get(0).getFeatureGroupDetail()) - .build(); - - responseObserver.onNext(resp); - responseObserver.onCompleted(); - } catch (IllegalArgumentException e) { - String errMsg = "Invalid feature group ID: " + featureGroupId; - log.error(errMsg); - onError(responseObserver, INVALID_ARGUMENT, errMsg, e); - } catch (Exception e) { - String errMsg = "Error while getting feature group with ID: " + featureGroupId; - log.error(errMsg, e); - onError(responseObserver, INTERNAL, errMsg, e); - } - } - - @Override - public void listFeatureGroups(Empty request, - StreamObserver responseObserver) { - try { - List featureGroupInfos = specService.listFeatureGroups(); - List featureGroupDetails = featureGroupInfos.stream() - .map(FeatureGroupInfo::getFeatureGroupDetail) - .collect(Collectors.toList()); - - ListFeatureGroupsResponse resp = ListFeatureGroupsResponse.newBuilder() - .addAllFeatureGroups(featureGroupDetails) - .build(); - responseObserver.onNext(resp); - responseObserver.onCompleted(); - } catch (Exception e) { - String errMsg = "Error while getting all feature groups"; - log.error(errMsg, e); - onError(responseObserver, INTERNAL, errMsg, e); - } - } - - @Override - public void getStorage(GetStorageRequest request, - StreamObserver responseObserver) { - String storageId = request.getId(); - try { - List storageInfos = specService.getStorage(Collections.singletonList(storageId)); - GetStorageResponse resp = GetStorageResponse.newBuilder() - .setStorage(storageInfos.get(0).getStorageDetail()) - .build(); - - responseObserver.onNext(resp); - responseObserver.onCompleted(); - } catch (IllegalArgumentException e) { - String errMsg = "Invalid storage ID: " + storageId; - log.error(errMsg, e); - onError(responseObserver, INVALID_ARGUMENT, errMsg, e); - } catch (Exception e) { - String errMsg = "Error while retrieving storage detail with ID: " + storageId; - 
log.error(errMsg, e); - onError(responseObserver, INTERNAL, errMsg, e); - } - } - - @Override - public void listStorage(Empty request, StreamObserver responseObserver) { - try { - List storageInfos = specService.listStorage(); - List storageDetails = storageInfos.stream() - .map(StorageInfo::getStorageDetail) - .collect(Collectors.toList()); - - ListStorageResponse resp = ListStorageResponse.newBuilder() - .addAllStorage(storageDetails) - .build(); - - responseObserver.onNext(resp); - responseObserver.onCompleted(); - } catch (Exception e) { - String errMsg = "Error while getting all storage details"; - log.error(errMsg, e); - onError(responseObserver, INTERNAL, errMsg, e); - } - } - - private void onError(StreamObserver responseObserver, Code errCode, String message, - Throwable cause) { - responseObserver.onError(Status.fromCode(errCode) - .withDescription(message) - .withCause(cause) - .asException()); - } -} diff --git a/core/src/main/java/feast/core/http/UiServiceController.java b/core/src/main/java/feast/core/http/UiServiceController.java deleted file mode 100644 index 54047a5e192..00000000000 --- a/core/src/main/java/feast/core/http/UiServiceController.java +++ /dev/null @@ -1,259 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.core.http; - -import feast.core.JobServiceProto.JobServiceTypes.GetJobResponse; -import feast.core.JobServiceProto.JobServiceTypes.ListJobsResponse; -import feast.core.UIServiceProto.UIServiceTypes.EntityDetail; -import feast.core.UIServiceProto.UIServiceTypes.FeatureDetail; -import feast.core.UIServiceProto.UIServiceTypes.FeatureGroupDetail; -import feast.core.UIServiceProto.UIServiceTypes.GetEntityResponse; -import feast.core.UIServiceProto.UIServiceTypes.GetFeatureGroupResponse; -import feast.core.UIServiceProto.UIServiceTypes.GetFeatureResponse; -import feast.core.UIServiceProto.UIServiceTypes.GetStorageResponse; -import feast.core.UIServiceProto.UIServiceTypes.ListEntitiesResponse; -import feast.core.UIServiceProto.UIServiceTypes.ListFeatureGroupsResponse; -import feast.core.UIServiceProto.UIServiceTypes.ListFeaturesResponse; -import feast.core.UIServiceProto.UIServiceTypes.ListStorageResponse; -import feast.core.UIServiceProto.UIServiceTypes.StorageDetail; -import feast.core.config.StorageConfig.StorageSpecs; -import feast.core.model.EntityInfo; -import feast.core.model.FeatureGroupInfo; -import feast.core.model.FeatureInfo; -import feast.core.model.StorageInfo; -import feast.core.service.JobManagementService; -import feast.core.service.SpecService; -import java.util.Arrays; -import java.util.List; -import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.web.bind.annotation.CrossOrigin; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RestController; - -/** - * Web service serving the feast UI. - */ -@CrossOrigin(maxAge = 3600) -@RestController -@Slf4j -public class UiServiceController { - - private final SpecService specService; - private final JobManagementService jobManagementService; - - @Autowired - public UiServiceController(SpecService specService, JobManagementService jobManagementService) { - this.specService = specService; - this.jobManagementService = jobManagementService; - } - - /** - * List all feature specs registered in the registry. - */ - @RequestMapping( - value = "/api/ui/features", - produces = "application/json", - method = RequestMethod.GET) - public ListFeaturesResponse listFeatures() { - try { - List features = - specService - .listFeatures() - .stream() - .map((fi) -> fi - .getFeatureDetail(specService.getStorageSpecs())) - .collect(Collectors.toList()); - return ListFeaturesResponse.newBuilder().addAllFeatures(features).build(); - } catch (Exception e) { - log.error("Exception in listFeatures: {}", e); - throw e; - } - } - - /** - * Get a single feature spec by ID. 
- */ - @RequestMapping( - value = "/api/ui/features/{id}", - produces = "application/json", - method = RequestMethod.GET) - public GetFeatureResponse getFeature(@PathVariable("id") String id) { - try { - FeatureInfo featureInfo = specService.getFeatures(Arrays.asList(id)).get(0); - FeatureInfo resolved = featureInfo.resolve(); - StorageSpecs storageSpecs = specService.getStorageSpecs(); - FeatureDetail featureDetail = resolved.getFeatureDetail(storageSpecs); - return GetFeatureResponse.newBuilder() - .setFeature(featureDetail) - .setRawSpec(featureInfo.getFeatureSpec()) - .build(); - } catch (Exception e) { - log.error("Exception in getFeature {}: {}", id, e); - throw e; - } - } - - /** - * List all feature group specs registered in the registry. - */ - @RequestMapping( - value = "/api/ui/feature_groups", - produces = "application/json", - method = RequestMethod.GET) - public ListFeatureGroupsResponse listFeatureGroups() { - try { - List featureGroups = - specService - .listFeatureGroups() - .stream() - .map(FeatureGroupInfo::getFeatureGroupDetail) - .collect(Collectors.toList()); - return ListFeatureGroupsResponse.newBuilder().addAllFeatureGroups(featureGroups).build(); - } catch (Exception e) { - log.error("Exception in listFeatureGroups: {}", e); - throw e; - } - } - - /** - * Get a single feature group spec by ID. - */ - @RequestMapping( - value = "/api/ui/feature_groups/{id}", - produces = "application/json", - method = RequestMethod.GET) - public GetFeatureGroupResponse getFeatureGroup(@PathVariable("id") String id) { - try { - FeatureGroupInfo featureGroupInfo = specService.getFeatureGroups(Arrays.asList(id)).get(0); - return GetFeatureGroupResponse.newBuilder() - .setFeatureGroup(featureGroupInfo.getFeatureGroupDetail()) - .build(); - } catch (Exception e) { - log.error("Exception in getFeatureGroup {}: {}", id, e); - throw e; - } - } - - /** - * List all entity specs registered in the registry. - */ - @RequestMapping( - value = "/api/ui/entities", - produces = "application/json", - method = RequestMethod.GET) - public ListEntitiesResponse listEntities() { - try { - List entities = - specService - .listEntities() - .stream() - .map(EntityInfo::getEntityDetail) - .collect(Collectors.toList()); - return ListEntitiesResponse.newBuilder().addAllEntities(entities).build(); - } catch (Exception e) { - log.error("Exception in listEntities: {}", e); - throw e; - } - } - - /** - * Get a single entity spec by name. - */ - @RequestMapping( - value = "/api/ui/entities/{id}", - produces = "application/json", - method = RequestMethod.GET) - public GetEntityResponse getEntity(@PathVariable("id") String id) { - try { - EntityInfo entityInfo = specService.getEntities(Arrays.asList(id)).get(0); - return GetEntityResponse.newBuilder().setEntity(entityInfo.getEntityDetail()).build(); - } catch (Exception e) { - log.error("Exception in getEntity {}: {}", id, e); - throw e; - } - } - - /** - * List all storage specs registered in the registry. - */ - @RequestMapping( - value = "/api/ui/storage", - produces = "application/json", - method = RequestMethod.GET) - public ListStorageResponse listStorage() { - try { - List storage = - specService - .listStorage() - .stream() - .map(StorageInfo::getStorageDetail) - .collect(Collectors.toList()); - return ListStorageResponse.newBuilder().addAllStorage(storage).build(); - } catch (Exception e) { - log.error("Exception in listStorage: {}", e); - throw e; - } - } - - /** - * Get a single storage spec by name. 
- */ - @RequestMapping( - value = "/api/ui/storage/{id}", - produces = "application/json", - method = RequestMethod.GET) - public GetStorageResponse getStorage(@PathVariable("id") String id) { - try { - StorageInfo storageInfo = specService.getStorage(Arrays.asList(id)).get(0); - return GetStorageResponse.newBuilder().setStorage(storageInfo.getStorageDetail()).build(); - } catch (Exception e) { - log.error("Exception in getStorage {}: {}", id, e); - throw e; - } - } - - @RequestMapping(value = "/api/ui/jobs", produces = "application/json", method = RequestMethod.GET) - public ListJobsResponse listJobs() { - try { - return ListJobsResponse.newBuilder().addAllJobs(jobManagementService.listJobs()).build(); - } catch (Exception e) { - log.error("Exception in listJobs: {}", e); - throw e; - } - } - - /** - * Get a single job by id. - */ - @RequestMapping( - value = "/api/ui/jobs/{id}", - produces = "application/json", - method = RequestMethod.GET) - public GetJobResponse getJob(@PathVariable("id") String id) { - try { - return GetJobResponse.newBuilder().setJob(jobManagementService.getJob(id)).build(); - } catch (Exception e) { - log.error("Exception in getJob {}: {}", id, e); - throw e; - } - } -} diff --git a/core/src/main/java/feast/core/job/JobManager.java b/core/src/main/java/feast/core/job/JobManager.java index c250a5ee189..3e1d22d2453 100644 --- a/core/src/main/java/feast/core/job/JobManager.java +++ b/core/src/main/java/feast/core/job/JobManager.java @@ -17,24 +17,42 @@ package feast.core.job; -import feast.specs.ImportJobSpecsProto.ImportJobSpecs; -import java.nio.file.Path; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.StoreProto.Store; +import feast.core.model.JobInfo; +import java.util.List; public interface JobManager { /** - * Submit an ingestion job into runner + * Get Runner Type + * @return runner type + */ + Runner getRunnerType(); + + /** + * Start an import job. + * + * @param name of job to run + * @param featureSets list of featureSets to be populated by the job + * @param sink Store to sink features to + * @return runner specific job id + */ + String startJob(String name, List featureSets, Store sink); + + /** + * Update already running job with new set of features to ingest. * - * @param importJobSpecs wrapper of all the specs needed for the ingestion job to run - * @param workspace path for working directory of the job, for errors and specs - * @return extId runner specific job ID. + * @param jobInfo jobInfo of target job to change + * @return job runner specific job id */ - String submitJob(ImportJobSpecs importJobSpecs, Path workspace); + String updateJob(JobInfo jobInfo); /** - * abort a job given runner-specific job ID. + * Abort a job given runner-specific job ID. * * @param extId runner specific job id. */ void abortJob(String extId); + } diff --git a/core/src/main/java/feast/core/job/JobMonitor.java b/core/src/main/java/feast/core/job/JobMonitor.java index 723bce63531..f77b9df9424 100644 --- a/core/src/main/java/feast/core/job/JobMonitor.java +++ b/core/src/main/java/feast/core/job/JobMonitor.java @@ -32,11 +32,4 @@ public interface JobMonitor { */ JobStatus getJobStatus(JobInfo job); - /** - * Get metrics of a job. - * - * @param job . - * @return list of metrics associated with the job. 
- */ - List getJobMetrics(JobInfo job); } diff --git a/core/src/main/java/feast/core/job/NoopJobMonitor.java b/core/src/main/java/feast/core/job/NoopJobMonitor.java index 16d399a6f7d..f73782298db 100644 --- a/core/src/main/java/feast/core/job/NoopJobMonitor.java +++ b/core/src/main/java/feast/core/job/NoopJobMonitor.java @@ -29,9 +29,4 @@ public class NoopJobMonitor implements JobMonitor { public JobStatus getJobStatus(JobInfo job) { return JobStatus.UNKNOWN; } - - @Override - public List getJobMetrics(JobInfo job) { - return Collections.emptyList(); - } } diff --git a/core/src/main/java/feast/core/job/ScheduledJobMonitor.java b/core/src/main/java/feast/core/job/ScheduledJobMonitor.java index 426ee7bba21..6772cb5ca26 100644 --- a/core/src/main/java/feast/core/job/ScheduledJobMonitor.java +++ b/core/src/main/java/feast/core/job/ScheduledJobMonitor.java @@ -32,6 +32,7 @@ import java.util.Collection; import java.util.List; +import org.springframework.transaction.annotation.Transactional; @Slf4j @Component @@ -39,28 +40,30 @@ public class ScheduledJobMonitor { private final JobMonitor jobMonitor; private final JobInfoRepository jobInfoRepository; - private final StatsdMetricPusher statsdMetricPusher; @Autowired public ScheduledJobMonitor( JobMonitor jobMonitor, - JobInfoRepository jobInfoRepository, - StatsdMetricPusher statsdMetricPusher) { + JobInfoRepository jobInfoRepository) { this.jobMonitor = jobMonitor; this.jobInfoRepository = jobInfoRepository; - this.statsdMetricPusher = statsdMetricPusher; } - @Scheduled( - fixedDelayString = "${feast.jobs.monitor.period}", - initialDelayString = "${feast.jobs.monitor.initialDelay}") + + // TODO: Keep receiving the following exception with these arguments below + // Caused by: java.lang.IllegalStateException: Encountered invalid @Scheduled method 'pollStatusAndMetrics': Circular placeholder reference .. in property definitions + // @Scheduled( + // fixedDelayString = "${feast.jobs.monitor.fixedDelay}", + // initialDelayString = "${feast.jobs.monitor.initialDelay}") + // + @Transactional + @Scheduled(cron = "* * * * * *") public void pollStatusAndMetrics() { - getJobMetrics(); - getJobStatus(); + updateJobStatus(); } /** Periodically pull status of job which is not in terminal state and update the status in DB. */ - /* package */ void getJobStatus() { + /* package */ void updateJobStatus() { if (jobMonitor instanceof NoopJobMonitor) { return; } @@ -87,28 +90,4 @@ public void pollStatusAndMetrics() { jobInfoRepository.save(job); } } - - /** Periodically pull metrics of job which is not in terminal state and push it to statsd. 
*/ - /* package */ void getJobMetrics() { - if (jobMonitor instanceof NoopJobMonitor) { - return; - } - - Collection nonTerminalJobs = - jobInfoRepository.findByStatusNotIn(JobStatus.getTerminalState()); - - for (JobInfo job : nonTerminalJobs) { - if (Strings.isNullOrEmpty(job.getExtId())) { - continue; - } - List metrics = jobMonitor.getJobMetrics(job); - if (metrics == null) { - continue; - } - - job.setMetrics(metrics); - statsdMetricPusher.pushMetrics(metrics); - jobInfoRepository.save(job); - } - } } diff --git a/core/src/main/java/feast/core/job/StatsdMetricPusher.java b/core/src/main/java/feast/core/job/StatsdMetricPusher.java deleted file mode 100644 index 7b8c193e338..00000000000 --- a/core/src/main/java/feast/core/job/StatsdMetricPusher.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.job; - -import com.timgroup.statsd.StatsDClient; -import feast.core.model.Metrics; -import java.util.List; -import org.springframework.beans.factory.annotation.Autowired; - -/** - * Metrics pusher to statsd - */ -public class StatsdMetricPusher { - private final StatsDClient statsDClient; - - @Autowired - public StatsdMetricPusher(StatsDClient statsDClient) { - this.statsDClient = statsDClient; - } - - /** - * Push metrics to statsd - * @param metrics list of Metrics from the runner - */ - public void pushMetrics(List metrics) { - for (Metrics metric : metrics) { - // feast metrics is delimited by colon (:) - // `row:result` <- all - // `scope:scope_id:result` <- per scope, either feature or entity - // for examples: - // 1. feature:driver.ping_sequence_weak_dir_change_mean:error - // 2. entity:driver:stored - // currently there are 3 results: - // 1. valid - // 2. error - // 3. 
stored - String[] splittedName = metric.getName().split(":"); - String jobId = metric.getJobInfo().getId(); - String scope = splittedName[0]; - switch (scope) { - case "row": - String type = splittedName[1]; - statsDClient - .gauge(createScope(scope), metric.getValue(), - createTag("jobId", jobId), - createTag("type", type)); - break; - case "feature": - String featureId = splittedName[1]; - String result = splittedName[2]; - statsDClient - .gauge(createScope(scope), metric.getValue(), - createTag("jobId", jobId), - createTag("featureId", featureId), - createTag("result", result)); - break; - case "entity": - String entityName = splittedName[1]; - String opResult = splittedName[2]; - statsDClient - .gauge(createScope(scope), metric.getValue(), - createTag("jobId", jobId), - createTag("entityName", entityName), - createTag("result", opResult)); - break; - } - } - } - - private String createTag(String tagName, String tagValue) { - return tagName + ":" + tagValue; - } - - private String createScope(String scopeName) { - return scopeName + "_metric"; - } -} diff --git a/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java b/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java index 65d3046eb79..89184f4f01f 100644 --- a/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java +++ b/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java @@ -17,39 +17,92 @@ package feast.core.job.dataflow; -import static com.google.common.base.Preconditions.checkNotNull; +import static feast.core.util.PipelineUtil.detectClassPathResourcesToStage; import com.google.api.services.dataflow.Dataflow; import com.google.api.services.dataflow.model.Job; import com.google.common.base.Strings; -import feast.core.config.ImportJobDefaults; -import feast.core.job.direct.DirectRunnerJobManager; -import feast.specs.ImportJobSpecsProto.ImportJobSpecs; -import java.nio.file.Path; +import com.google.protobuf.InvalidProtocolBufferException; +import com.google.protobuf.util.JsonFormat; +import com.google.protobuf.util.JsonFormat.Printer; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.StoreProto.Store; +import feast.core.config.FeastProperties.MetricsProperties; +import feast.core.exception.JobExecutionException; +import feast.core.job.JobManager; +import feast.core.job.Runner; +import feast.core.model.FeatureSet; +import feast.core.model.JobInfo; +import feast.core.util.TypeConversion; +import feast.ingestion.ImportJob; +import feast.ingestion.options.ImportOptions; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; import lombok.extern.slf4j.Slf4j; +import org.apache.beam.runners.dataflow.DataflowPipelineJob; +import org.apache.beam.runners.dataflow.DataflowRunner; +import org.apache.beam.sdk.PipelineResult.State; +import org.apache.beam.sdk.options.PipelineOptionsFactory; @Slf4j -public class DataflowJobManager extends DirectRunnerJobManager { +public class DataflowJobManager implements JobManager { + + private final Runner RUNNER_TYPE = Runner.DATAFLOW; private final String projectId; private final String location; private final Dataflow dataflow; + private final Map defaultOptions; + private final MetricsProperties metrics; public DataflowJobManager( - Dataflow dataflow, String projectId, String location, ImportJobDefaults importJobDefaults) { - super(importJobDefaults); - checkNotNull(projectId); - checkNotNull(location); - this.projectId = projectId; - this.location = 
location; + Dataflow dataflow, Map defaultOptions, MetricsProperties metricsProperties) { + this.defaultOptions = defaultOptions; this.dataflow = dataflow; + this.metrics = metricsProperties; + this.projectId = defaultOptions.get("project"); + this.location = defaultOptions.get("region"); + } + + @Override + public Runner getRunnerType() { + return RUNNER_TYPE; + } + + + @Override + public String startJob(String name, List featureSets, Store sink) { + return submitDataflowJob(name, featureSets, sink, false); } + /** + * Update an existing Dataflow job. + * + * @param jobInfo jobInfo of target job to change + * @return Dataflow-specific job id + */ @Override - public String submitJob(ImportJobSpecs importJobSpecs, Path workspace) { - return super.submitJob(importJobSpecs, workspace); + public String updateJob(JobInfo jobInfo) { + try { + List featureSetSpecs = new ArrayList<>(); + for (FeatureSet featureSet : jobInfo.getFeatureSets()) { + featureSetSpecs.add(featureSet.toProto()); + } + return submitDataflowJob(jobInfo.getId(), featureSetSpecs, jobInfo.getStore().toProto(), + true); + } catch (InvalidProtocolBufferException e) { + throw new RuntimeException(String.format("Unable to update job %s", jobInfo.getId()), e); + } } + /** + * Abort an existing Dataflow job. Streaming Dataflow jobs are always drained, not cancelled. + * + * @param dataflowJobId Dataflow-specific job id (not the job name) + */ @Override public void abortJob(String dataflowJobId) { try { @@ -73,4 +126,72 @@ public void abortJob(String dataflowJobId) { Strings.lenientFormat("Unable to drain job with id: %s", dataflowJobId), e); } } + + private String submitDataflowJob(String jobName, List featureSets, Store sink, + boolean update) { + try { + ImportOptions pipelineOptions = getPipelineOptions(jobName, featureSets, sink, update); + DataflowPipelineJob pipelineResult = runPipeline(pipelineOptions); + String jobId = waitForJobToRun(pipelineResult); + return jobId; + } catch (Exception e) { + log.error("Error submitting job", e); + throw new JobExecutionException(String.format("Error running ingestion job: %s", e), e); + } + } + + private ImportOptions getPipelineOptions(String jobName, List featureSets, + Store sink, + boolean update) throws IOException { + String[] args = TypeConversion.convertMapToArgs(defaultOptions); + ImportOptions pipelineOptions = PipelineOptionsFactory.fromArgs(args).as(ImportOptions.class); + Printer printer = JsonFormat.printer(); + List featureSetsJson = new ArrayList<>(); + for (FeatureSetSpec featureSet : featureSets) { + featureSetsJson.add(printer.print(featureSet)); + } + pipelineOptions.setFeatureSetSpecJson(featureSetsJson); + pipelineOptions.setStoreJson(Collections.singletonList(printer.print(sink))); + pipelineOptions.setProject(projectId); + pipelineOptions.setUpdate(update); + pipelineOptions.setRunner(DataflowRunner.class); + pipelineOptions.setJobName(jobName); + pipelineOptions + .setFilesToStage(detectClassPathResourcesToStage(DataflowRunner.class.getClassLoader())); + + if (metrics.isEnabled()) { + pipelineOptions.setMetricsExporterType(metrics.getType()); + if (metrics.getType().equals("statsd")) { + pipelineOptions.setStatsdHost(metrics.getHost()); + pipelineOptions.setStatsdPort(metrics.getPort()); + } + } + return pipelineOptions; + } + + public DataflowPipelineJob runPipeline(ImportOptions pipelineOptions) + throws IOException { + return (DataflowPipelineJob) ImportJob + .runPipeline(pipelineOptions); + } + + private String waitForJobToRun(DataflowPipelineJob 
pipelineResult) + throws RuntimeException, InterruptedException { + // TODO: add timeout + while (true) { + State state = pipelineResult.getState(); + if (state.isTerminal()) { + String dataflowDashboardUrl = String + .format("https://console.cloud.google.com/dataflow/jobsDetail/locations/%s/jobs/%s", + location, pipelineResult.getJobId()); + throw new RuntimeException( + String.format( + "Failed to submit dataflow job, job state is %s. Refer to the dataflow dashboard for more information: %s", + state.toString(), dataflowDashboardUrl)); + } else if (state.equals(State.RUNNING)) { + return pipelineResult.getJobId(); + } + Thread.sleep(2000); + } + } } diff --git a/core/src/main/java/feast/core/job/dataflow/DataflowJobMonitor.java b/core/src/main/java/feast/core/job/dataflow/DataflowJobMonitor.java index 90252c38629..c8cfaeda4b8 100644 --- a/core/src/main/java/feast/core/job/dataflow/DataflowJobMonitor.java +++ b/core/src/main/java/feast/core/job/dataflow/DataflowJobMonitor.java @@ -74,44 +74,4 @@ public JobStatus getJobStatus(JobInfo jobInfo) { } return JobStatus.UNKNOWN; } - - - /** - * Get list of feast-related metrics of a dataflow job given the {@code dataflowJobId}. - * - * @param job job instance. - * @return list of feast-related metrics. Or return an empty list if error happens. - */ - public List getJobMetrics(JobInfo job) { - if (!Runner.DATAFLOW.getName().equals(job.getRunner())) { - return null; - } - - String dataflowJobId = job.getExtId(); - try { - JobMetrics jobMetrics = dataflow.projects().locations().jobs() - .getMetrics(projectId, location, dataflowJobId).execute(); - return jobMetrics.getMetrics().stream() - // only get feast metrics - .filter(m -> FEAST_METRICS_NAMESPACE - .equals(m.getName().getContext().get(METRICS_NAMESPACE_KEY))) - // convert to internal feast metric structure. - .map(m -> mapDataflowMetricsToFeastMetrics(job, m)) - .filter(m -> m != null) - .collect(Collectors.toList()); - } catch (Exception e) { - log.error("Unable to retrieve metrics for job with id: {}\ncause: {}", dataflowJobId, e); - } - return Collections.emptyList(); - } - - private Metrics mapDataflowMetricsToFeastMetrics(JobInfo job, MetricUpdate dfMetrics) { - String name = dfMetrics.getName().getName(); - if (dfMetrics.getScalar() == null) { - return null; - } - double value = ((Number) dfMetrics.getScalar()).doubleValue(); - Metrics feastMetric = new Metrics(job, name, value); - return feastMetric; - } } diff --git a/core/src/main/java/feast/core/job/direct/DirectJob.java b/core/src/main/java/feast/core/job/direct/DirectJob.java new file mode 100644 index 00000000000..10480c51c64 --- /dev/null +++ b/core/src/main/java/feast/core/job/direct/DirectJob.java @@ -0,0 +1,26 @@ +package feast.core.job.direct; + +import java.io.IOException; +import lombok.AllArgsConstructor; +import lombok.Getter; +import org.apache.beam.sdk.PipelineResult; + + +@Getter +@AllArgsConstructor +public class DirectJob { + + private String jobId; + private PipelineResult pipelineResult; + + /** + * Abort the job, if the state is not terminal. If the job has already concluded, this method will + * do nothing. 
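The waitForJobToRun loop above carries a TODO to add a timeout; below is a minimal sketch of a bounded variant, assuming the same Beam DataflowPipelineJob and PipelineResult.State APIs used in this change. The helper class, deadline handling and exception messages are illustrative only.

// Illustrative sketch of a bounded wait addressing the "add timeout" TODO above.
package feast.core.example;

import java.time.Duration;
import java.time.Instant;
import org.apache.beam.runners.dataflow.DataflowPipelineJob;
import org.apache.beam.sdk.PipelineResult.State;

public class BoundedDataflowWait {

  // Polls the job every two seconds until it is RUNNING, terminal, or the deadline passes.
  public static String waitForJobToRun(DataflowPipelineJob job, Duration timeout)
      throws InterruptedException {
    Instant deadline = Instant.now().plus(timeout);
    while (Instant.now().isBefore(deadline)) {
      State state = job.getState();
      if (state.isTerminal()) {
        throw new RuntimeException(
            String.format("Dataflow job %s entered terminal state %s before running",
                job.getJobId(), state));
      }
      if (state.equals(State.RUNNING)) {
        return job.getJobId();
      }
      Thread.sleep(2000);
    }
    throw new RuntimeException(
        String.format("Timed out after %s waiting for Dataflow job %s to start",
            timeout, job.getJobId()));
  }
}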
+ */ + public void abort() throws IOException { + if (!pipelineResult.getState().isTerminal()) { + pipelineResult.cancel(); + } + } +} + diff --git a/core/src/main/java/feast/core/job/direct/DirectJobRegistry.java b/core/src/main/java/feast/core/job/direct/DirectJobRegistry.java new file mode 100644 index 00000000000..99f478644ab --- /dev/null +++ b/core/src/main/java/feast/core/job/direct/DirectJobRegistry.java @@ -0,0 +1,64 @@ +package feast.core.job.direct; + +import com.google.common.base.Strings; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import javax.inject.Singleton; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@Singleton +public class DirectJobRegistry { + + private Map jobs; + + public DirectJobRegistry() { + this.jobs = new HashMap<>(); + } + + /** + * Add the given job to the registry. + * @param job containing the job id, + */ + public void add(DirectJob job) { + if (jobs.containsKey(job.getJobId())) { + throw new IllegalArgumentException( + Strings.lenientFormat("Job with id %s already exists and is running", job.getJobId())); + } + jobs.put(job.getJobId(), job); + } + + /** + * Get DirectJob corresponding to the given ID + * + * @param id of the job to retrieve + * @return DirectJob + */ + public DirectJob get(String id) { + return jobs.getOrDefault(id, null); + } + + /** + * Remove DirectJob corresponding to the given ID + * + * @param id of the job to remove + */ + public void remove(String id) { + jobs.remove(id); + } + + /** + * Kill all child jobs when the registry is garbage collected + */ + @Override + public void finalize() { + for (DirectJob job : this.jobs.values()) { + try { + job.getPipelineResult().cancel(); + } catch (IOException e) { + log.error("Failed to stop job", e); + } + } + } +} diff --git a/core/src/main/java/feast/core/job/direct/DirectJobStateMapper.java b/core/src/main/java/feast/core/job/direct/DirectJobStateMapper.java new file mode 100644 index 00000000000..763340b605f --- /dev/null +++ b/core/src/main/java/feast/core/job/direct/DirectJobStateMapper.java @@ -0,0 +1,31 @@ +package feast.core.job.direct; + +import feast.core.model.JobStatus; +import java.util.HashMap; +import java.util.Map; +import org.apache.beam.sdk.PipelineResult.State; + +public class DirectJobStateMapper { + + private static final Map BEAM_TO_FEAT_JOB_STATUS; + + static { + BEAM_TO_FEAT_JOB_STATUS = new HashMap<>(); + BEAM_TO_FEAT_JOB_STATUS.put(State.FAILED, JobStatus.ERROR); + BEAM_TO_FEAT_JOB_STATUS.put(State.RUNNING, JobStatus.RUNNING); + BEAM_TO_FEAT_JOB_STATUS.put(State.UNKNOWN, JobStatus.UNKNOWN); + BEAM_TO_FEAT_JOB_STATUS.put(State.CANCELLED, JobStatus.ABORTED); + BEAM_TO_FEAT_JOB_STATUS.put(State.DONE, JobStatus.COMPLETED); + BEAM_TO_FEAT_JOB_STATUS.put(State.STOPPED, JobStatus.ABORTED); + BEAM_TO_FEAT_JOB_STATUS.put(State.UPDATED, JobStatus.RUNNING); + } + + /** + * Map a dataflow job state to Feast's JobStatus + * @param jobState beam PipelineResult State + * @return JobStatus + */ + public JobStatus map(State jobState) { + return BEAM_TO_FEAT_JOB_STATUS.get(jobState); + } +} diff --git a/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java b/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java index c4055b929bc..a9b968bcb91 100644 --- a/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java +++ b/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java @@ -17,109 +17,145 @@ package feast.core.job.direct; -import com.google.common.annotations.VisibleForTesting; -import 
feast.core.config.ImportJobDefaults; +import com.google.common.base.Strings; +import com.google.protobuf.InvalidProtocolBufferException; +import com.google.protobuf.util.JsonFormat; +import com.google.protobuf.util.JsonFormat.Printer; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.StoreProto; +import feast.core.config.FeastProperties.MetricsProperties; import feast.core.exception.JobExecutionException; import feast.core.job.JobManager; +import feast.core.job.Runner; +import feast.core.model.FeatureSet; +import feast.core.model.JobInfo; import feast.core.util.TypeConversion; -import feast.specs.ImportJobSpecsProto.ImportJobSpecs; -import java.io.BufferedReader; -import java.io.InputStreamReader; -import java.nio.file.Path; +import feast.ingestion.ImportJob; +import feast.ingestion.options.ImportOptions; +import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Optional; -import java.util.regex.Pattern; import lombok.extern.slf4j.Slf4j; +import org.apache.beam.runners.direct.DirectRunner; +import org.apache.beam.sdk.PipelineResult; +import org.apache.beam.sdk.options.PipelineOptionsFactory; @Slf4j public class DirectRunnerJobManager implements JobManager { - private static final int SLEEP_MS = 10; - private static final Pattern JOB_EXT_ID_PREFIX_REGEX = Pattern.compile(".*FeastImportJobId:.*"); - protected ImportJobDefaults defaults; + private final Runner RUNNER_TYPE = Runner.DIRECT; - public DirectRunnerJobManager(ImportJobDefaults importJobDefaults) { - this.defaults = importJobDefaults; + protected Map defaultOptions; + private final DirectJobRegistry jobs; + private MetricsProperties metrics; + + + public DirectRunnerJobManager(Map defaultOptions, DirectJobRegistry jobs, + MetricsProperties metricsProperties) { + this.defaultOptions = defaultOptions; + this.jobs = jobs; + this.metrics = metricsProperties; } @Override - public String submitJob(ImportJobSpecs importJobSpecs, Path workspace) { - ProcessBuilder pb = getProcessBuilder(importJobSpecs, workspace); - - log.info(String.format("Executing command: %s", String.join(" ", pb.command()))); + public Runner getRunnerType() { + return RUNNER_TYPE; + } + /** + * Start a direct runner job. 
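+   *
+   * <p>Minimal sketch of the expected call pattern (the manager construction and the
+   * argument values here are assumptions for illustration, not part of this change):
+   * <pre>{@code
+   * DirectRunnerJobManager manager =
+   *     new DirectRunnerJobManager(defaultOptions, new DirectJobRegistry(), metricsProperties);
+   * String jobId = manager.startJob("feast-import-job", featureSetSpecs, storeSpec);
+   * }</pre>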
+ * + * @param name of job to run + * @param featureSetSpecs list of specs for featureSets to be populated by the job + * @param sinkSpec Store to sink features to + */ + @Override + public String startJob(String name, List featureSetSpecs, + StoreProto.Store sinkSpec) { try { - Process p = pb.start(); - return runProcess(p); + ImportOptions pipelineOptions = getPipelineOptions(featureSetSpecs, sinkSpec); + PipelineResult pipelineResult = runPipeline(pipelineOptions); + DirectJob directJob = new DirectJob(name, pipelineResult); + jobs.add(directJob); + return name; } catch (Exception e) { log.error("Error submitting job", e); throw new JobExecutionException(String.format("Error running ingestion job: %s", e), e); } } - @Override - public void abortJob(String extId) { - throw new UnsupportedOperationException("Unable to abort a job running in direct runner"); + private ImportOptions getPipelineOptions(List featureSetSpecs, + StoreProto.Store sink) + throws InvalidProtocolBufferException { + String[] args = TypeConversion.convertMapToArgs(defaultOptions); + ImportOptions pipelineOptions = PipelineOptionsFactory.fromArgs(args).as(ImportOptions.class); + Printer printer = JsonFormat.printer(); + List featureSetsJson = new ArrayList<>(); + for (FeatureSetSpec featureSetSpec : featureSetSpecs) { + featureSetsJson.add(printer.print(featureSetSpec)); + } + pipelineOptions.setFeatureSetSpecJson(featureSetsJson); + pipelineOptions.setStoreJson(Collections.singletonList(printer.print(sink))); + pipelineOptions.setRunner(DirectRunner.class); + pipelineOptions.setProject(""); // set to default value to satisfy validation + if (metrics.isEnabled()) { + pipelineOptions.setMetricsExporterType(metrics.getType()); + if (metrics.getType().equals("statsd")) { + pipelineOptions.setStatsdHost(metrics.getHost()); + pipelineOptions.setStatsdPort(metrics.getPort()); + } + } + pipelineOptions.setBlockOnRun(false); + return pipelineOptions; } /** - * Builds the command to execute the ingestion job + * Stops an existing job and restarts a new job in its place as a proxy for job updates. + * Note that since we do not maintain a consumer group across the two jobs and the old job + * is not drained, some data may be lost. + * + * As a rule of thumb, direct jobs in feast should only be used for testing. 
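+   *
+   * <p>Hedged sketch of the intended call (assumes {@code jobInfo} is an existing record
+   * for a running direct-runner job):
+   * <pre>{@code
+   * String jobId = directRunnerJobManager.updateJob(jobInfo); // abort, then restart under the same id
+   * }</pre>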
* - * @return configured ProcessBuilder + * @param jobInfo jobInfo of target job to change + * @return jobId of the job */ - @VisibleForTesting - public ProcessBuilder getProcessBuilder(ImportJobSpecs importJobSpecs, Path workspace) { - Map options = - TypeConversion.convertJsonStringToMap(defaults.getImportJobOptions()); - List commands = new ArrayList<>(); - commands.add("java"); - commands.add("-jar"); - commands.add(defaults.getExecutable()); - commands.add(option("jobName", importJobSpecs.getJobId())); - commands.add(option("workspace", workspace.toUri().toString())); - commands.add(option("runner", defaults.getRunner())); - - options.forEach((k, v) -> commands.add(option(k, v))); - return new ProcessBuilder(commands); + @Override + public String updateJob(JobInfo jobInfo) { + String jobId = jobInfo.getExtId(); + abortJob(jobId); + try { + List featureSetSpecs = new ArrayList<>(); + for (FeatureSet featureSet : jobInfo.getFeatureSets()) { + featureSetSpecs.add(featureSet.toProto()); + } + startJob(jobId, featureSetSpecs, jobInfo.getStore().toProto()); + } catch (JobExecutionException | InvalidProtocolBufferException e) { + throw new JobExecutionException(String.format("Error running ingestion job: %s", e), e); + } + return jobId; } /** - * Run the given process and extract the job id from the output logs + * Abort the direct runner job with the given id, then remove it from the direct jobs registry. * - * @param p Process - * @return job id + * @param extId runner specific job id. */ - @VisibleForTesting - public String runProcess(Process p) { - try (BufferedReader outputStream = - new BufferedReader(new InputStreamReader(p.getInputStream())); - BufferedReader errorsStream = - new BufferedReader(new InputStreamReader(p.getErrorStream()))) { - String extId = ""; - while (p.isAlive()) { - while (outputStream.ready()) { - String l = outputStream.readLine(); - System.out.println(l); - if (JOB_EXT_ID_PREFIX_REGEX.matcher(l).matches()) { - extId = l.split("FeastImportJobId:")[1]; - } - } - Thread.sleep(SLEEP_MS); - } - if (p.exitValue() > 0) { - Optional errorString = errorsStream.lines().reduce((l1, l2) -> l1 + '\n' + l2); - throw new RuntimeException(String.format("Could not submit job: \n%s", errorString)); - } - return extId; - } catch (Exception e) { - log.error("Error running ingestion job: ", e); - throw new JobExecutionException(String.format("Error running ingestion job: %s", e), e); + @Override + public void abortJob(String extId) { + DirectJob job = jobs.get(extId); + try { + job.abort(); + } catch (IOException e) { + throw new RuntimeException( + Strings.lenientFormat("Unable to abort DirectRunner job %s", extId), e); } + jobs.remove(extId); } - private String option(String key, String value) { - return String.format("--%s=%s", key, value); + public PipelineResult runPipeline(ImportOptions pipelineOptions) + throws IOException { + return ImportJob.runPipeline(pipelineOptions); } } diff --git a/core/src/main/java/feast/core/job/direct/DirectRunnerJobMonitor.java b/core/src/main/java/feast/core/job/direct/DirectRunnerJobMonitor.java new file mode 100644 index 00000000000..e0b8a8ff64a --- /dev/null +++ b/core/src/main/java/feast/core/job/direct/DirectRunnerJobMonitor.java @@ -0,0 +1,31 @@ +package feast.core.job.direct; + +import feast.core.job.JobMonitor; +import feast.core.model.JobInfo; +import feast.core.model.JobStatus; +import feast.core.model.Metrics; +import java.util.ArrayList; +import java.util.List; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class 
DirectRunnerJobMonitor implements JobMonitor { + + private final DirectJobRegistry jobs; + private final DirectJobStateMapper jobStateMapper; + + public DirectRunnerJobMonitor(DirectJobRegistry jobs) { + this.jobs = jobs; + jobStateMapper = new DirectJobStateMapper(); + } + + @Override + public JobStatus getJobStatus(JobInfo job) { + DirectJob directJob = jobs.get(job.getId()); + if (directJob == null) { + return JobStatus.ABORTED; + } + return jobStateMapper.map(directJob.getPipelineResult().getState()); + } + +} diff --git a/core/src/main/java/feast/core/job/flink/FlinkJob.java b/core/src/main/java/feast/core/job/flink/FlinkJob.java deleted file mode 100644 index dc514c66062..00000000000 --- a/core/src/main/java/feast/core/job/flink/FlinkJob.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.job.flink; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import lombok.AllArgsConstructor; -import lombok.Getter; -import lombok.NoArgsConstructor; -import lombok.Setter; - -@NoArgsConstructor -@AllArgsConstructor -@Setter -@Getter -@JsonIgnoreProperties(ignoreUnknown = true) -public class FlinkJob { - - /** job ID */ - String jid; - - /** job name */ - String name; - - /** state */ - String state; -} diff --git a/core/src/main/java/feast/core/job/flink/FlinkJobConfig.java b/core/src/main/java/feast/core/job/flink/FlinkJobConfig.java deleted file mode 100644 index f26b2c4be04..00000000000 --- a/core/src/main/java/feast/core/job/flink/FlinkJobConfig.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.job.flink; - -import lombok.Value; - -@Value -public class FlinkJobConfig { - - /** - * Flink's job master URL - * e.g: localhost:8081 - */ - String masterUrl; - - /** - * Directory containing flink-conf.yaml - * e.g.: /etc/flink/conf - */ - String configDir; -} diff --git a/core/src/main/java/feast/core/job/flink/FlinkJobList.java b/core/src/main/java/feast/core/job/flink/FlinkJobList.java deleted file mode 100644 index af9776d6439..00000000000 --- a/core/src/main/java/feast/core/job/flink/FlinkJobList.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.job.flink; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import java.util.List; -import lombok.Getter; -import lombok.NoArgsConstructor; -import lombok.Setter; - -@NoArgsConstructor -@Setter -@Getter -@JsonIgnoreProperties(ignoreUnknown = true) -public class FlinkJobList { - - /** List of flink job. */ - List jobs; -} diff --git a/core/src/main/java/feast/core/job/flink/FlinkJobManager.java b/core/src/main/java/feast/core/job/flink/FlinkJobManager.java deleted file mode 100644 index 72cfdb58a75..00000000000 --- a/core/src/main/java/feast/core/job/flink/FlinkJobManager.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.job.flink; - -import feast.core.config.ImportJobDefaults; -import feast.core.job.JobManager; -import feast.core.util.TypeConversion; -import feast.specs.ImportJobSpecsProto.ImportJobSpecs; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import lombok.extern.slf4j.Slf4j; -import org.apache.flink.client.cli.CliFrontend; - -@Slf4j -public class FlinkJobManager implements JobManager { - - private final CliFrontend flinkCli; - private final ImportJobDefaults defaults; - private final String masterUrl; - private final FlinkRestApi flinkRestApis; - - public FlinkJobManager( - CliFrontend flinkCli, - FlinkJobConfig config, - FlinkRestApi flinkRestApi, - ImportJobDefaults defaults) { - this.flinkCli = flinkCli; - this.defaults = defaults; - this.masterUrl = config.getMasterUrl(); - this.flinkRestApis = flinkRestApi; - } - - @Override - public String submitJob(ImportJobSpecs importJobSpecs, Path workspace) { - flinkCli.parseParameters(createRunArgs(importJobSpecs, workspace)); - - return getFlinkJobId(importJobSpecs.getJobId()); - } - - @Override - public void abortJob(String extId) { - flinkCli.parseParameters(createStopArgs(extId)); - } - - private String getFlinkJobId(String jobId) { - FlinkJobList jobList = flinkRestApis.getJobsOverview(); - for (FlinkJob job : jobList.getJobs()) { - if (jobId.equals(job.getName())) { - return job.getJid(); - } - } - log.warn("Unable to find job: {}", jobId); - return ""; - } - - private String[] createRunArgs(ImportJobSpecs importJobSpecs, Path workspace) { - Map options = - TypeConversion.convertJsonStringToMap(defaults.getImportJobOptions()); - List commands = new ArrayList<>(); - commands.add("run"); - commands.add("-d"); - commands.add("-m"); - commands.add(masterUrl); - 
commands.add(defaults.getExecutable()); - commands.add(option("jobName", importJobSpecs.getJobId())); - commands.add(option("runner", defaults.getRunner())); - commands.add(option("workspace", workspace.toString())); - - options.forEach((k, v) -> commands.add(option(k, v))); - return commands.toArray(new String[]{}); - } - - private String[] createStopArgs(String extId) { - List commands = new ArrayList<>(); - commands.add("cancel"); - commands.add("-m"); - commands.add(masterUrl); - commands.add(extId); - return commands.toArray(new String[]{}); - } - - private String option(String key, String value) { - return String.format("--%s=%s", key, value); - } -} diff --git a/core/src/main/java/feast/core/job/flink/FlinkJobMapper.java b/core/src/main/java/feast/core/job/flink/FlinkJobMapper.java deleted file mode 100644 index 5bbcac29ef2..00000000000 --- a/core/src/main/java/feast/core/job/flink/FlinkJobMapper.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.job.flink; - -import feast.core.model.JobStatus; -import java.util.HashMap; -import java.util.Map; - -public class FlinkJobMapper { - private static final Map FLINK_TO_FEAST_JOB_STATE_MAP; - - static { - FLINK_TO_FEAST_JOB_STATE_MAP = new HashMap<>(); - FLINK_TO_FEAST_JOB_STATE_MAP.put(FlinkJobState.CREATED, JobStatus.PENDING); - FLINK_TO_FEAST_JOB_STATE_MAP.put(FlinkJobState.RUNNING, JobStatus.RUNNING); - FLINK_TO_FEAST_JOB_STATE_MAP.put(FlinkJobState.FINISHED, JobStatus.COMPLETED); - FLINK_TO_FEAST_JOB_STATE_MAP.put(FlinkJobState.RESTARTING, JobStatus.RUNNING); - FLINK_TO_FEAST_JOB_STATE_MAP.put(FlinkJobState.CANCELLING, JobStatus.ABORTING); - FLINK_TO_FEAST_JOB_STATE_MAP.put(FlinkJobState.CANCELED, JobStatus.ABORTED); - FLINK_TO_FEAST_JOB_STATE_MAP.put(FlinkJobState.FAILING, JobStatus.ERROR); - FLINK_TO_FEAST_JOB_STATE_MAP.put(FlinkJobState.FAILED, JobStatus.ERROR); - FLINK_TO_FEAST_JOB_STATE_MAP.put(FlinkJobState.SUSPENDING, JobStatus.SUSPENDING); - FLINK_TO_FEAST_JOB_STATE_MAP.put(FlinkJobState.SUSPENDED, JobStatus.SUSPENDED); - FLINK_TO_FEAST_JOB_STATE_MAP.put(FlinkJobState.RECONCILING, JobStatus.PENDING); - } - - /** - * Map a string containing Flink's JobState into Feast's JobStatus - * - * @param jobState Flink JobState - * @return JobStatus. - * @throws IllegalArgumentException if jobState is invalid. 
- */ - public JobStatus map(String jobState) { - FlinkJobState dfJobState = FlinkJobState.valueOf(jobState); - if (FLINK_TO_FEAST_JOB_STATE_MAP.containsKey(dfJobState)) { - return FLINK_TO_FEAST_JOB_STATE_MAP.get(dfJobState); - } - throw new IllegalArgumentException("Unknown job state: " + jobState); - } -} diff --git a/core/src/main/java/feast/core/job/flink/FlinkJobMonitor.java b/core/src/main/java/feast/core/job/flink/FlinkJobMonitor.java deleted file mode 100644 index 0ba6b561244..00000000000 --- a/core/src/main/java/feast/core/job/flink/FlinkJobMonitor.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.job.flink; - -import feast.core.job.JobMonitor; -import feast.core.job.Runner; -import feast.core.model.JobInfo; -import feast.core.model.JobStatus; -import feast.core.model.Metrics; -import java.util.Collections; -import java.util.List; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -public class FlinkJobMonitor implements JobMonitor { - - private final FlinkRestApi flinkRestApi; - private final FlinkJobMapper mapper; - - public FlinkJobMonitor(FlinkRestApi flinkRestApi) { - this.flinkRestApi = flinkRestApi; - this.mapper = new FlinkJobMapper(); - } - - @Override - public JobStatus getJobStatus(JobInfo jobInfo) { - if (!Runner.FLINK.getName().equals(jobInfo.getRunner())) { - return jobInfo.getStatus(); - } - - FlinkJobList jobList = flinkRestApi.getJobsOverview(); - for (FlinkJob job : jobList.getJobs()) { - if (jobInfo.getExtId().equals(job.getJid())) { - return mapFlinkJobStatusToFeastJobStatus(job.getState()); - } - } - return JobStatus.UNKNOWN; - } - - @Override - public List getJobMetrics(JobInfo job) { - if (!Runner.FLINK.getName().equals(job.getRunner())) { - return null; - } - // TODO: metrics for flink - return Collections.emptyList(); - } - - private JobStatus mapFlinkJobStatusToFeastJobStatus(String state) { - try { - return mapper.map(state); - } catch (IllegalArgumentException e) { - log.error("Unknown job state: " + state); - return JobStatus.UNKNOWN; - } - } -} diff --git a/core/src/main/java/feast/core/job/flink/FlinkJobState.java b/core/src/main/java/feast/core/job/flink/FlinkJobState.java deleted file mode 100644 index 61b75501cc8..00000000000 --- a/core/src/main/java/feast/core/job/flink/FlinkJobState.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.core.job.flink; - -/** - * Possible state of flink's job. - */ -public enum FlinkJobState { - - /** Job is newly created */ - CREATED, - - /** Job is running */ - RUNNING, - - /** job is completed successfully */ - FINISHED, - - /** job is reset and restarting */ - RESTARTING, - - /** job is being canceled */ - CANCELLING, - - /** job has ben cancelled */ - CANCELED, - - /** job has failed and waiting for cleanup */ - FAILING, - - /** job has failed with a non-recoverable failure */ - FAILED, - - /** job has been suspended and waiting for cleanup */ - SUSPENDING, - - /** job has been suspended */ - SUSPENDED, - - /** job is reconciling and waits for task execution to recover state */ - RECONCILING -} diff --git a/core/src/main/java/feast/core/job/flink/FlinkRestApi.java b/core/src/main/java/feast/core/job/flink/FlinkRestApi.java deleted file mode 100644 index e6e7c6b6ce8..00000000000 --- a/core/src/main/java/feast/core/job/flink/FlinkRestApi.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.job.flink; - -import java.net.URI; -import java.util.Collections; -import lombok.extern.slf4j.Slf4j; -import org.springframework.web.client.RestTemplate; - -@Slf4j -public class FlinkRestApi { - - private static final String SCHEME = "http"; - private static final String JOB_OVERVIEW_PATH = "jobs/overview"; - private final RestTemplate restTemplate; - private final URI jobsOverviewUri; - - public FlinkRestApi(RestTemplate restTemplate, String masterUrl) throws Exception { - this.restTemplate = restTemplate; - this.jobsOverviewUri = - new URI(String.format("%s://%s/%s", SCHEME, masterUrl, JOB_OVERVIEW_PATH)); - } - - public FlinkJobList getJobsOverview() { - try { - FlinkJobList jobList = restTemplate.getForObject(jobsOverviewUri, FlinkJobList.class); - if (jobList == null || jobList.getJobs() == null) { - jobList.setJobs(Collections.emptyList()); - } - return jobList; - } catch (Exception e) { - log.error("Unable to get job overview from {}: ", jobsOverviewUri, e); - FlinkJobList flinkJobList = new FlinkJobList(); - flinkJobList.setJobs(Collections.emptyList()); - return flinkJobList; - } - } -} diff --git a/core/src/main/java/feast/core/model/EntityInfo.java b/core/src/main/java/feast/core/model/EntityInfo.java deleted file mode 100644 index e990361e498..00000000000 --- a/core/src/main/java/feast/core/model/EntityInfo.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.model; - -import feast.core.UIServiceProto.UIServiceTypes.EntityDetail; -import feast.specs.EntitySpecProto.EntitySpec; -import lombok.AllArgsConstructor; -import lombok.Getter; -import lombok.Setter; - -import javax.persistence.*; -import java.util.List; - -import static feast.core.util.TypeConversion.convertTagStringToList; -import static feast.core.util.TypeConversion.convertTimestamp; - -/** - * A row in the registry storing information about a single Entity, including its relevant metadata. - */ -@AllArgsConstructor -@Getter -@Setter -@Entity -@Table(name = "entities") -public class EntityInfo extends AbstractTimestampEntity { - - @Id - @Column(name = "name", nullable = false) - private String name; - - @Column(name = "description", nullable = false) - private String description; - - @Column(name = "tags") - private String tags; - - @ManyToMany(mappedBy = "entities") - private List jobs; - - @Column(name = "enabled") - private boolean enabled = true; - - public EntityInfo() { - super(); - } - - public EntityInfo(EntitySpec spec) { - this.name = spec.getName(); - this.description = spec.getDescription(); - this.tags = String.join(",", spec.getTagsList()); - } - - /** Get the entity spec associated with this record. */ - public EntitySpec getEntitySpec() { - return EntitySpec.newBuilder() - .setName(name) - .setDescription(description) - .addAllTags(convertTagStringToList(tags)) - .build(); - } - - /** Get the entity detail containing both spec and metadata, associated with this record. */ - public EntityDetail getEntityDetail() { - return EntityDetail.newBuilder() - .setSpec(this.getEntitySpec()) - .setLastUpdated(convertTimestamp(this.getLastUpdated())) - .build(); - } - - /** - * Updates the entity info with specifications from the incoming entity spec. - * - * @param update new entity spec - */ - public void update(EntitySpec update) { - this.description = update.getDescription(); - this.tags = String.join(",", update.getTagsList()); - } -} diff --git a/core/src/main/java/feast/core/model/FeatureGroupInfo.java b/core/src/main/java/feast/core/model/FeatureGroupInfo.java deleted file mode 100644 index 50f96976a8a..00000000000 --- a/core/src/main/java/feast/core/model/FeatureGroupInfo.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.core.model; - -import com.google.common.collect.Maps; -import feast.core.UIServiceProto.UIServiceTypes.FeatureGroupDetail; -import feast.core.util.TypeConversion; -import feast.specs.FeatureGroupSpecProto.FeatureGroupSpec; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.Id; -import javax.persistence.Table; -import lombok.AllArgsConstructor; -import lombok.Getter; -import lombok.Setter; - -/** - * A row in the registry storing information about a single feature group, including its relevant - * metadata. - */ -@AllArgsConstructor -@Entity -@Getter -@Setter -@Table(name = "feature_groups") -public class FeatureGroupInfo extends AbstractTimestampEntity { - - @Id - private String id; - - @Column(name = "tags") - private String tags; - - @Column(name = "options") - private String options; - - public FeatureGroupInfo() { - super(); - } - - public FeatureGroupInfo( - FeatureGroupSpec spec) { - this.id = spec.getId(); - this.tags = String.join(",", spec.getTagsList()); - this.options = TypeConversion.convertMapToJsonString(spec.getOptionsMap()); - } - - /** - * Get the feature group spec associated with this record. - */ - public FeatureGroupSpec getFeatureGroupSpec() { - return FeatureGroupSpec.newBuilder() - .setId(id) - .addAllTags(TypeConversion.convertTagStringToList(tags)) - .putAllOptions(TypeConversion.convertJsonStringToMap(options)) - .build(); - } - - /** - * Get the feature group detail containing both spec and metadata, associated with this record. - */ - public FeatureGroupDetail getFeatureGroupDetail() { - return FeatureGroupDetail.newBuilder() - .setSpec(this.getFeatureGroupSpec()) - .setLastUpdated(TypeConversion.convertTimestamp(this.getLastUpdated())) - .build(); - } - - public void update(FeatureGroupSpec update) throws IllegalArgumentException { - if (!isLegalUpdate(update)) { - throw new IllegalArgumentException( - "Feature group already exists. Update only allowed for fields: [tags]"); - } - this.tags = String.join(",", update.getTagsList()); - } - - private boolean isLegalUpdate(FeatureGroupSpec update) { - FeatureGroupSpec spec = this.getFeatureGroupSpec(); - return Maps.difference(spec.getOptionsMap(), update.getOptionsMap()).areEqual(); - } -} diff --git a/core/src/main/java/feast/core/model/FeatureInfo.java b/core/src/main/java/feast/core/model/FeatureInfo.java deleted file mode 100644 index ec52d8822f5..00000000000 --- a/core/src/main/java/feast/core/model/FeatureInfo.java +++ /dev/null @@ -1,232 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.core.model; - -import static feast.core.util.TypeConversion.convertJsonStringToMap; -import static feast.core.util.TypeConversion.convertTagStringToList; - -import com.google.common.base.Strings; -import com.google.common.collect.Maps; -import feast.core.UIServiceProto.UIServiceTypes.FeatureDetail; -import feast.core.config.StorageConfig.StorageSpecs; -import feast.core.storage.BigQueryStorageManager; -import feast.core.util.TypeConversion; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.types.ValueProto.ValueType; -import java.util.ArrayList; -import java.util.List; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.EnumType; -import javax.persistence.Enumerated; -import javax.persistence.FetchType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToMany; -import javax.persistence.ManyToOne; -import javax.persistence.Table; -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.Getter; -import lombok.Setter; - -/** - * A row in the registry storing information about a single feature, including its relevant - * metadata. - */ -@AllArgsConstructor -@Getter -@Setter -@Entity -@Table(name = "features") -public class FeatureInfo extends AbstractTimestampEntity { - - @Id - private String id; - - @Column(name = "name", nullable = false) - private String name; - - @Column(name = "owner", nullable = false) - private String owner; - - @Column(name = "description", nullable = false) - private String description; - - @Column(name = "uri", nullable = false) - private String uri; - - @Enumerated(EnumType.STRING) - private ValueType.Enum valueType; - - @ManyToOne - @JoinColumn(name = "entity") - private EntityInfo entity; - - @ManyToOne(optional = true, fetch = FetchType.LAZY) - @JoinColumn(name = "feature_group") - private FeatureGroupInfo featureGroup; - - @Column(name = "tags") - private String tags; - - @Column(name = "options") - private String options; - - @Column(name = "big_query_view") - private String bigQueryView; - - @ManyToMany(mappedBy = "features") - private List jobs; - - @Column(name = "enabled") - private boolean enabled = true; - - public FeatureInfo() { - super(); - } - - public FeatureInfo( - FeatureSpec spec, - EntityInfo entityInfo, - FeatureGroupInfo featureGroupInfo) { - this.id = spec.getId(); - this.name = spec.getName(); - this.owner = spec.getOwner(); - this.description = spec.getDescription(); - this.uri = spec.getUri(); - this.valueType = spec.getValueType(); - this.entity = entityInfo; - this.featureGroup = featureGroupInfo; - this.bigQueryView = ""; - this.tags = String.join(",", spec.getTagsList()); - this.options = TypeConversion.convertMapToJsonString(spec.getOptionsMap()); - } - - public FeatureInfo(FeatureInfo other) { - this.id = other.id; - this.name = other.name; - this.owner = other.owner; - this.description = other.description; - this.uri = other.uri; - this.valueType = other.valueType; - this.entity = other.entity; - this.featureGroup = other.featureGroup; - this.tags = other.tags; - this.options = other.options; - this.bigQueryView = other.bigQueryView; - this.enabled = other.enabled; - this.setLastUpdated(other.getLastUpdated()); - this.setCreated(other.getCreated()); - } - - /** - * Get the feature spec associated with this record. The spec returned by this method will not - * resolve inheritance from associated feature groups. 
- */ - public FeatureSpec getFeatureSpec() { - FeatureSpec.Builder builder = - FeatureSpec.newBuilder() - .setId(id) - .setName(name) - .setOwner(owner) - .setDescription(description) - .setUri(uri) - .setValueType(valueType) - .setEntity(entity.getName()) - .addAllTags(convertTagStringToList(tags)) - .putAllOptions(convertJsonStringToMap(options)); - if (featureGroup != null) { - builder.setGroup(featureGroup.getId()); - } - return builder.build(); - } - - /* - * Resolve the feature spec with its group settings. - */ - public FeatureInfo resolve() { - if (featureGroup == null) { - return this; - } - FeatureInfo featureInfoCopy = new FeatureInfo(this); - List resolvedTags = new ArrayList<>(); - resolvedTags.addAll(convertTagStringToList(featureInfoCopy.tags)); - resolvedTags.addAll(convertTagStringToList(featureGroup.getTags())); - featureGroup.getOptions(); - featureInfoCopy.tags = String.join(",", resolvedTags); - - return featureInfoCopy; - } - - /** - * Get the feature detail containing both spec and metadata, associated with this record. - */ - public FeatureDetail getFeatureDetail(StorageSpecs storageSpecs) { - return FeatureDetail.newBuilder() - .setSpec(this.getFeatureSpec()) - .setBigqueryView(!Strings.isNullOrEmpty(bigQueryView) ? bigQueryView - : createBigqueryViewLink(storageSpecs.getWarehouseStorageSpec())) - .setEnabled(this.enabled) - .setLastUpdated(TypeConversion.convertTimestamp(this.getLastUpdated())) - .setCreated(TypeConversion.convertTimestamp(this.getCreated())) - .build(); - } - - protected String createBigqueryViewLink(StorageSpec storageSpec) { - if (storageSpec == null || !storageSpec.getType().equals(BigQueryStorageManager.TYPE)) { - return "N.A."; - } - String projectId = storageSpec - .getOptionsOrDefault(BigQueryStorageManager.OPT_BIGQUERY_PROJECT, null); - String dataset = storageSpec - .getOptionsOrDefault(BigQueryStorageManager.OPT_BIGQUERY_DATASET, null); - - return String.format( - "https://bigquery.cloud.google.com/table/%s:%s.%s_view", - projectId, dataset, entity.getName()); - } - - /** - * Updates the feature info with specifications from the incoming feature spec. - * - *

TODO: maybe allow changes to id, store etc if no jobs are feeding into this feature - * - * @param update new feature spec - */ - public void update(FeatureSpec update) throws IllegalArgumentException { - if (!isLegalUpdate(update)) { - throw new IllegalArgumentException( - "Feature already exists. Update only allowed for fields: [owner, description, uri, tags]"); - } - this.owner = update.getOwner(); - this.description = update.getDescription(); - this.uri = update.getUri(); - this.tags = String.join(",", update.getTagsList()); - } - - private boolean isLegalUpdate(FeatureSpec update) { - FeatureSpec spec = this.getFeatureSpec(); - return spec.getName().equals(update.getName()) - && spec.getEntity().equals(update.getEntity()) - && spec.getValueType().equals(update.getValueType()) - && spec.getGroup().equals(update.getGroup()) - && Maps.difference(spec.getOptionsMap(), update.getOptionsMap()).areEqual(); - } -} diff --git a/core/src/main/java/feast/core/model/FeatureSet.java b/core/src/main/java/feast/core/model/FeatureSet.java new file mode 100644 index 00000000000..947a0c6ca2d --- /dev/null +++ b/core/src/main/java/feast/core/model/FeatureSet.java @@ -0,0 +1,146 @@ +package feast.core.model; + +import com.google.protobuf.Duration; +import com.google.protobuf.InvalidProtocolBufferException; +import feast.core.FeatureSetProto.EntitySpec; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.FeatureSetProto.FeatureSpec; +import feast.types.ValueProto.ValueType; +import java.util.ArrayList; +import java.util.List; +import javax.persistence.CascadeType; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; +import javax.persistence.OneToMany; +import javax.persistence.Table; +import lombok.Getter; +import lombok.Setter; +import org.hibernate.annotations.Fetch; +import org.hibernate.annotations.FetchMode; + +@Getter +@Setter +@Entity +@Table(name = "feature_sets") +public class FeatureSet extends AbstractTimestampEntity implements Comparable { + + // Id of the featureSet, defined as name:version + @Id + @Column(name = "id", nullable = false, unique = true) + private String id; + + // Name of the featureSet + @Column(name = "name", nullable = false) + private String name; + + // Version of the featureSet + @Column(name = "version") + private int version; + + // Max allowed staleness for features in this featureSet. 
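+  // Stored as seconds, taken from the spec's proto Duration (e.g. a 3600s max age maps to
+  // maxAgeSeconds = 3600 in fromProto below).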
+ @Column(name = "max_age") + private long maxAgeSeconds; + + @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.EAGER) + @Fetch(value = FetchMode.SUBSELECT) + @JoinColumn(name = "entities") + private List entities; + + // Features inside this featureSet + @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.EAGER) + @Fetch(value = FetchMode.SUBSELECT) + @JoinColumn(name = "features") + private List features; + + // Source on which feature rows can be found + @ManyToOne(cascade = CascadeType.ALL, fetch = FetchType.EAGER) + @JoinColumn(name = "source") + private Source source; + + public FeatureSet() { + super(); + } + + public FeatureSet(String name, int version, long maxAgeSeconds, List entities, + List features, + Source source) { + this.id = String.format("%s:%s", name, version); + this.name = name; + this.version = version; + this.maxAgeSeconds = maxAgeSeconds; + this.entities = entities; + this.features = features; + this.source = source; + } + + public static FeatureSet fromProto(FeatureSetSpec featureSetSpec) { + Source source = Source.fromProto(featureSetSpec.getSource()); + String id = String.format("%s:%d", featureSetSpec.getName(), featureSetSpec.getVersion()); + List features = new ArrayList<>(); + for (FeatureSpec feature : featureSetSpec.getFeaturesList()) { + features.add(new Field(id, + feature.getName(), + feature.getValueType())); + } + List entities = new ArrayList<>(); + for (EntitySpec entity : featureSetSpec.getEntitiesList()) { + entities.add(new Field(id, + entity.getName(), + entity.getValueType())); + } + + return new FeatureSet(featureSetSpec.getName(), + featureSetSpec.getVersion(), + featureSetSpec.getMaxAge().getSeconds(), + entities, + features, + source); + } + + public FeatureSetSpec toProto() throws InvalidProtocolBufferException { + List entitySpecs = new ArrayList<>(); + for (Field entity : entities) { + entitySpecs.add(EntitySpec.newBuilder() + .setName(entity.getName()) + .setValueType(ValueType.Enum.valueOf(entity.getType())) + .build()); + } + + List featureSpecs = new ArrayList<>(); + for (Field feature : features) { + featureSpecs.add(FeatureSpec.newBuilder() + .setName(feature.getName()) + .setValueType(ValueType.Enum.valueOf(feature.getType())) + .build()); + } + return FeatureSetSpec.newBuilder() + .setName(name) + .setVersion(version) + .setMaxAge(Duration.newBuilder().setSeconds(maxAgeSeconds)) + .addAllEntities(entitySpecs) + .addAllFeatures(featureSpecs) + .setSource(source.toProto()) + .build(); + } + + /** + * Checks if the given featureSet's schema and source has is different from this one. + * + * @param other FeatureSet to compare to + * @return boolean denoting if the source or schema have changed. 
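+   *
+   * <p>Sketch of intended use (an illustration only; {@code existing} and {@code incomingSpec}
+   * are assumed variables, and exception handling is omitted):
+   * <pre>{@code
+   * FeatureSet incoming = FeatureSet.fromProto(incomingSpec);
+   * if (!existing.equalTo(incoming)) {
+   *   // schema or source changed; a new version / ingestion job is needed
+   * }
+   * }</pre>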
+ */ + public boolean equalTo(FeatureSet other) throws InvalidProtocolBufferException { + return name.equals(other.getName()) && entities.equals(other.entities) && features + .equals(other.features) && source.equalTo(other.getSource()) + && maxAgeSeconds == other.maxAgeSeconds; + } + + @Override + public int compareTo(FeatureSet o) { + return Integer.compare(version, o.version); + } +} diff --git a/core/src/main/java/feast/core/model/Field.java b/core/src/main/java/feast/core/model/Field.java new file mode 100644 index 00000000000..fe618c3e9e3 --- /dev/null +++ b/core/src/main/java/feast/core/model/Field.java @@ -0,0 +1,69 @@ +package feast.core.model; + +import feast.types.ValueProto.ValueType; +import java.util.Objects; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; +import javax.persistence.Table; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +@Entity +@Table(name = "fields") +public class Field { + + // Id of the field, defined as featureSetId.name + @Id + @Column(name = "id", nullable = false, unique = true) + private String id; + + // FeatureSet this feature belongs to + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "feature_set_id") + private FeatureSet featureSet; + + // Name of the feature + @Column(name = "name", nullable = false) + private String name; + + // Type of the feature, should correspond with feast.types.ValueType + @Column(name = "type", nullable = false) + private String type; + + public Field() { + super(); + } + + public Field(String featureSetId, String name, ValueType.Enum type) { + FeatureSet featureSet = new FeatureSet(); + featureSet.setId(featureSetId); + this.featureSet = featureSet; + this.id = String.format("%s:%s", featureSetId, name); + this.name = name; + this.type = type.toString(); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Field field = (Field) o; + return name.equals(field.getName()) && + type.equals(field.getType()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), id, featureSet, name, type); + } +} diff --git a/core/src/main/java/feast/core/model/JobInfo.java b/core/src/main/java/feast/core/model/JobInfo.java index ab11a74acc1..e2e61fee080 100644 --- a/core/src/main/java/feast/core/model/JobInfo.java +++ b/core/src/main/java/feast/core/model/JobInfo.java @@ -17,125 +17,95 @@ package feast.core.model; -import com.google.protobuf.InvalidProtocolBufferException; -import com.google.protobuf.util.JsonFormat; -import feast.core.JobServiceProto.JobServiceTypes.JobDetail; -import feast.core.util.TypeConversion; -import feast.specs.ImportSpecProto; +import com.google.api.Metric; +import feast.core.SourceProto.SourceType; +import java.util.List; +import javax.persistence.CascadeType; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.EnumType; +import javax.persistence.Enumerated; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.JoinTable; +import javax.persistence.ManyToMany; +import javax.persistence.ManyToOne; +import javax.persistence.OneToMany; +import javax.persistence.Table; import lombok.AllArgsConstructor; import lombok.Getter; import lombok.Setter; -import javax.persistence.*; -import java.util.ArrayList; -import 
java.util.List; -import java.util.stream.Collectors; - -/** Contains information about a run job. */ +/** + * Contains information about a run job. + */ @AllArgsConstructor @Getter @Setter @Entity @Table(name = "jobs") public class JobInfo extends AbstractTimestampEntity { + // Internal job name. Generated by feast ingestion upon invocation. - @Id private String id; + @Id + private String id; // External job id, generated by the runner and retrieved by feast. // Used internally for job management. @Column(name = "ext_id") private String extId; - // Import job source type - @Column(name = "type") - private String type; - // Runner type @Column(name = "runner") private String runner; - // Job options. Stored as a json string as it is specific to the runner. - @Column(name = "source_options") - private String sourceOptions; + // Source id + @ManyToOne + @JoinColumn(name = "source_id") + private Source source; - // Job options. Stored as a json string as it is specific to the runner. - @Column(name = "job_options") - private String jobOptions; + // Sink id + @ManyToOne + @JoinColumn(name = "store_name") + private Store store; - // Entities populated by the job - @ManyToMany - @JoinTable( - joinColumns = {@JoinColumn(name = "job_id")}, - inverseJoinColumns = {@JoinColumn(name = "entity_id")}) - private List entities; - // Features populated by the job + // FeatureSets populated by the job @ManyToMany @JoinTable( joinColumns = {@JoinColumn(name = "job_id")}, - inverseJoinColumns = {@JoinColumn(name = "feature_id")}) - private List features; + inverseJoinColumns = {@JoinColumn(name = "feature_set_id")}) + private List featureSets; // Job Metrics - @OneToMany(mappedBy = "jobInfo",cascade = CascadeType.ALL, orphanRemoval = true) + @OneToMany(mappedBy = "jobInfo", cascade = CascadeType.ALL) private List metrics; @Enumerated(EnumType.STRING) @Column(name = "status", length = 16) private JobStatus status; - // Raw import spec, stored as a json string. 
- @Column(name = "raw", length = 40960) - private String raw; - public JobInfo() { super(); } - public JobInfo( - String jobId, - String extId, - String runner, - ImportSpecProto.ImportSpec importSpec, - JobStatus status) - throws InvalidProtocolBufferException { - this.id = jobId; + public JobInfo(String id, String extId, String runner, Source source, Store sink, + List featureSets, JobStatus jobStatus) { + this.id = id; this.extId = extId; - this.type = importSpec.getType(); + this.source = source; this.runner = runner; - this.sourceOptions = TypeConversion.convertMapToJsonString(importSpec.getSourceOptionsMap()); - this.jobOptions = TypeConversion.convertMapToJsonString(importSpec.getJobOptionsMap()); - this.entities = new ArrayList<>(); - for (String entity : importSpec.getEntitiesList()) { - EntityInfo entityInfo = new EntityInfo(); - entityInfo.setName(entity); - this.entities.add(entityInfo); - } - this.features = new ArrayList<>(); - for (ImportSpecProto.Field field : importSpec.getSchema().getFieldsList()) { - if (!field.getFeatureId().equals("")) { - FeatureInfo featureInfo = new FeatureInfo(); - featureInfo.setId(field.getFeatureId()); - this.features.add(featureInfo); - } - } - this.raw = JsonFormat.printer().print(importSpec); - this.status = status; + this.store = sink; + this.featureSets = featureSets; + this.status = jobStatus; + } + + public void updateMetrics(List newMetrics) { + metrics.clear(); + metrics.addAll(newMetrics); } - public JobDetail getJobDetail() { - return JobDetail.newBuilder() - .setId(this.id) - .setExtId(this.extId) - .setType(this.type) - .setRunner(this.runner) - .setStatus(this.status.toString()) - .addAllEntities( - this.entities.stream().map(EntityInfo::getName).collect(Collectors.toList())) - .addAllFeatures( - this.features.stream().map(FeatureInfo::getId).collect(Collectors.toList())) - .setLastUpdated(TypeConversion.convertTimestamp(this.getLastUpdated())) - .setCreated(TypeConversion.convertTimestamp(this.getCreated())) - .build(); + public String getSinkName() { + return store.getName(); } } diff --git a/core/src/main/java/feast/core/model/Source.java b/core/src/main/java/feast/core/model/Source.java new file mode 100644 index 00000000000..1f2e538209f --- /dev/null +++ b/core/src/main/java/feast/core/model/Source.java @@ -0,0 +1,188 @@ +package feast.core.model; + +import com.google.common.collect.Sets; +import com.google.protobuf.Message; +import feast.core.SourceProto; +import feast.core.SourceProto.KafkaSourceConfig; +import feast.core.SourceProto.Source.Builder; +import feast.core.SourceProto.SourceType; +import io.grpc.Status; +import java.util.Set; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.Id; +import javax.persistence.Table; +import lombok.Setter; + +@Setter +@Entity +@Table(name = "sources") +public class Source { + + private static final Set KAFKA_OPTIONS = Sets.newHashSet("bootstrapServers"); + + @Id + @Column(name = "id", updatable = false, nullable = false) + private String id; + + // Type of the source. Should map to feast.types.Source.SourceType + @Column(name = "type", nullable = false) + private String type; + + // Bootstrap servers, comma delimited. Used by kafka sources. + @Column(name = "bootstrap_servers") + private String bootstrapServers; + + // Topics to listen to, comma delimited. Used by kafka sources. 
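+  // e.g. "feast-features" (illustrative name); populated from KafkaSourceConfig.getTopic()
+  // in the constructor below.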
+ @Column(name = "topics") + private String topics; + + @Column(name = "is_default") + private boolean isDefault; + + public Source() { + super(); + } + + public Source(SourceType type, KafkaSourceConfig config, boolean isDefault) { + if (config.getBootstrapServers().isEmpty() || config.getTopic().isEmpty()) { + throw Status.INVALID_ARGUMENT.withDescription( + "Unsupported source options. Kafka source requires bootstrap servers and topic to be specified.") + .asRuntimeException(); + } + this.type = type.toString(); + this.bootstrapServers = config.getBootstrapServers(); + this.topics = config.getTopic(); + this.isDefault = isDefault; + this.id = generateId(); + } + + /** + * Construct a source facade object from a given proto object. + * + * @param sourceProto SourceProto.Source object + * @return Source facade object + */ + public static Source fromProto(SourceProto.Source sourceProto) { + if (sourceProto.equals(SourceProto.Source.getDefaultInstance())) { + Source source = new Source(); + source.isDefault = true; + return source; + } + + switch (sourceProto.getType()) { + case KAFKA: + return new Source(sourceProto.getType(), sourceProto.getKafkaSourceConfig(), false); + case UNRECOGNIZED: + default: + throw Status.INVALID_ARGUMENT + .withDescription("Unsupported source type. Only [KAFKA] is supported.") + .asRuntimeException(); + } + } + + /** + * Convert this object to its equivalent proto object. + * + * @return SourceProto.Source + */ + public SourceProto.Source toProto() { + Builder builder = SourceProto.Source.newBuilder() + .setType(SourceType.valueOf(type)); + switch (SourceType.valueOf(type)) { + case KAFKA: + KafkaSourceConfig config = KafkaSourceConfig.newBuilder() + .setBootstrapServers(bootstrapServers) + .setTopic(topics).build(); + return builder.setKafkaSourceConfig(config).build(); + case UNRECOGNIZED: + default: + throw new RuntimeException("Unable to convert source to proto"); + } + } + + /** + * Get the id for this feature source + * + * @return feature source id in the format TYPE/options + */ + public String getId() { + return id; + } + + /** + * Get the options for this feature source + * + * @return feature source options + */ + public Message getOptions() { + switch (SourceType.valueOf(type)) { + case KAFKA: + return KafkaSourceConfig + .newBuilder() + .setBootstrapServers(bootstrapServers) + .setTopic(topics) + .build(); + case UNRECOGNIZED: + default: + throw new RuntimeException("Unable to convert source to proto"); + } + } + + /** + * Get the type of source. + * + * @return SourceType of this feature source + */ + public SourceType getType() { + return SourceType.valueOf(type); + } + + /** + * Indicate whether to use the system defaults or not. + * + * @return boolean indicating whether this feature set source uses defaults. + */ + public boolean isDefault() { + return isDefault; + } + + /** + * Override equality for sources. isDefault is always compared first; if both sources are using + * the default feature source, they will be equal. If not they will be compared based on their + * type-specific options. 
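+   *
+   * <p>For illustration (a sketch; {@code kafkaSourceProto} is an assumed proto instance):
+   * <pre>{@code
+   * Source a = Source.fromProto(kafkaSourceProto);
+   * Source b = Source.fromProto(kafkaSourceProto);
+   * boolean same = a.equalTo(b); // true: same bootstrap servers and topic
+   * }</pre>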
+ * + * @param other other Source + * @return boolean equal + */ + public boolean equalTo(Source other) { + if (other.isDefault && isDefault) { + return true; + } + + if (!type.equals(other.type)) { + return false; + } + + switch (SourceType.valueOf(type)) { + case KAFKA: + return bootstrapServers.equals(other.bootstrapServers) && + topics.equals(other.topics); + case UNRECOGNIZED: + default: + return false; + } + } + + private String generateId() { + switch (SourceType.valueOf(type)) { + case KAFKA: + return String.format("KAFKA/%s/%s", bootstrapServers, topics); + default: + // should not occur + return ""; + } + } +} + + diff --git a/core/src/main/java/feast/core/model/StorageInfo.java b/core/src/main/java/feast/core/model/StorageInfo.java deleted file mode 100644 index f38463c9d22..00000000000 --- a/core/src/main/java/feast/core/model/StorageInfo.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.model; - -import feast.core.UIServiceProto.UIServiceTypes.StorageDetail; -import feast.specs.StorageSpecProto.StorageSpec; -import lombok.AllArgsConstructor; -import lombok.Getter; -import lombok.Setter; - -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.Id; -import javax.persistence.Table; - -import static feast.core.util.TypeConversion.*; - -/** - * A row in the registry storing information about a single storage specification, including its - * relevant metadata. - */ -@AllArgsConstructor -@Getter -@Setter -@Entity -@Table(name = "storage") -public class StorageInfo extends AbstractTimestampEntity { - - @Id - private String id; - - @Column(name = "type", nullable = false) - private String type; - - @Column(name = "options") - private String options; - - public StorageInfo() { - super(); - } - - public StorageInfo(StorageSpec spec) { - this.id = spec.getId(); - this.type = spec.getType(); - this.options = convertMapToJsonString(spec.getOptionsMap()); - } - - /** - * Get the storage spec associated with this record. - */ - public StorageSpec getStorageSpec() { - return StorageSpec.newBuilder() - .setId(id) - .setType(type) - .putAllOptions(convertJsonStringToMap(options)) - .build(); - } - - /** - * Get the storage detail containing both spec and metadata, associated with this record. 
- */ - public StorageDetail getStorageDetail() { - return StorageDetail.newBuilder() - .setSpec(this.getStorageSpec()) - .setLastUpdated(convertTimestamp(this.getLastUpdated())) - .build(); - } -} diff --git a/core/src/main/java/feast/core/model/Store.java b/core/src/main/java/feast/core/model/Store.java new file mode 100644 index 00000000000..93dd6362e6d --- /dev/null +++ b/core/src/main/java/feast/core/model/Store.java @@ -0,0 +1,118 @@ +package feast.core.model; + +import com.google.protobuf.InvalidProtocolBufferException; +import feast.core.StoreProto; +import feast.core.StoreProto.Store.BigQueryConfig; +import feast.core.StoreProto.Store.Builder; +import feast.core.StoreProto.Store.CassandraConfig; +import feast.core.StoreProto.Store.RedisConfig; +import feast.core.StoreProto.Store.StoreType; +import feast.core.StoreProto.Store.Subscription; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.Id; +import javax.persistence.Lob; +import javax.persistence.Table; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.Setter; +import com.google.protobuf.util.JsonFormat; + +@Getter +@Setter +@AllArgsConstructor +@Entity +@Table(name = "stores") +public class Store { + + // Name of the store. Must be unique + @Id + @Column(name = "name", nullable = false, unique = true) + private String name; + + // Type of the store, should map to feast.core.Store.StoreType + @Column(name = "type", nullable = false) + private String type; + + // Connection string to the database + @Column(name = "config", nullable = false) + @Lob + private byte[] config; + + // FeatureSets this store is subscribed to, comma delimited. 
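+  // For example (hypothetical feature sets), a store subscribed to driver_features v1 and
+  // customer_features v2 stores the string "driver_features:1,customer_features:2", as built
+  // by the convertSubscriptionToString/convertStringToSubscription helpers below.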
+ @Column(name = "subscriptions") + private String subscriptions; + + public Store() { + super(); + } + + public static Store fromProto(StoreProto.Store storeProto) throws IllegalArgumentException { + List subs = new ArrayList<>(); + for (Subscription s : storeProto.getSubscriptionsList()) { + subs.add(convertSubscriptionToString(s)); + } + byte[] config; + switch (storeProto.getType()) { + case REDIS: + config = storeProto.getRedisConfig().toByteArray(); + break; + case BIGQUERY: + config = storeProto.getBigqueryConfig().toByteArray(); + break; + case CASSANDRA: + config = storeProto.getCassandraConfig().toByteArray(); + break; + default: + throw new IllegalArgumentException("Invalid store provided"); + } + return new Store(storeProto.getName(), storeProto.getType().toString(), + config, String.join(",", subs)); + } + + public StoreProto.Store toProto() throws InvalidProtocolBufferException { + List subscriptionProtos = getSubscriptions(); + Builder storeProtoBuilder = StoreProto.Store.newBuilder() + .setName(name) + .setType(StoreType.valueOf(type)) + .addAllSubscriptions(subscriptionProtos); + switch (StoreType.valueOf(type)) { + case REDIS: + RedisConfig redisConfig = RedisConfig.parseFrom(config); + return storeProtoBuilder.setRedisConfig(redisConfig).build(); + case BIGQUERY: + BigQueryConfig bqConfig = BigQueryConfig.parseFrom(config); + return storeProtoBuilder.setBigqueryConfig(bqConfig).build(); + case CASSANDRA: + CassandraConfig cassConfig = CassandraConfig.parseFrom(config); + return storeProtoBuilder.setCassandraConfig(cassConfig).build(); + default: + throw new InvalidProtocolBufferException("Invalid store set"); + } + } + + public List getSubscriptions() { + return Arrays.stream(subscriptions.split(",")) + .map(this::convertStringToSubscription) + .collect(Collectors.toList()); + } + + private static String convertSubscriptionToString(Subscription sub) { + return String.format("%s:%s", sub.getName(), sub.getVersion()); + } + + private Subscription convertStringToSubscription(String sub) { + if (sub.equals("")) { + return Subscription.newBuilder().build(); + } + String[] split = sub.split(":"); + return Subscription.newBuilder() + .setName(split[0]) + .setVersion(split[1]) + .build(); + } +} diff --git a/core/src/main/java/feast/core/service/JobCoordinatorService.java b/core/src/main/java/feast/core/service/JobCoordinatorService.java new file mode 100644 index 00000000000..2cd5cd6778a --- /dev/null +++ b/core/src/main/java/feast/core/service/JobCoordinatorService.java @@ -0,0 +1,204 @@ +package feast.core.service; + +import com.google.common.base.Strings; +import feast.core.FeatureSetProto; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.SourceProto; +import feast.core.SourceProto.SourceType; +import feast.core.StoreProto; +import feast.core.dao.JobInfoRepository; +import feast.core.exception.JobExecutionException; +import feast.core.exception.RetrievalException; +import feast.core.job.JobManager; +import feast.core.log.Action; +import feast.core.log.AuditLogger; +import feast.core.log.Resource; +import feast.core.model.FeatureSet; +import feast.core.model.JobInfo; +import feast.core.model.JobStatus; +import feast.core.model.Source; +import feast.core.model.Store; +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import 
org.springframework.stereotype.Service; + +@Slf4j +@Service +public class JobCoordinatorService { + + private JobInfoRepository jobInfoRepository; + private JobManager jobManager; + + @Autowired + public JobCoordinatorService( + JobInfoRepository jobInfoRepository, JobManager jobManager) { + this.jobInfoRepository = jobInfoRepository; + this.jobManager = jobManager; + } + + /** + * Start or update a job given the list of FeatureSets to populate and the store to sink to. If + * there has been no change in the featureSet, and there is a running job for the featureSet, this + * method will do nothing. + */ + public JobInfo startOrUpdateJob(List featureSetSpecs, + SourceProto.Source sourceSpec, + StoreProto.Store store) { + Source source = Source.fromProto(sourceSpec); + Optional job = getJob(source.getId(), store.getName()); + if (job.isPresent()) { + Set existingFeatureSetsPopulatedByJob = + job.get().getFeatureSets().stream().map(FeatureSet::getId).collect(Collectors.toSet()); + Set newFeatureSetsPopulatedByJob = + featureSetSpecs.stream() + .map(fs -> fs.getName() + ":" + fs.getVersion()) + .collect(Collectors.toSet()); + if (existingFeatureSetsPopulatedByJob.size() == newFeatureSetsPopulatedByJob.size() + && existingFeatureSetsPopulatedByJob.containsAll(newFeatureSetsPopulatedByJob)) { + return job.get(); + } else { + return updateJob(job.get(), featureSetSpecs, store); + } + } else { + return startJob(createJobId(source.getId(), store.getName()), + featureSetSpecs, sourceSpec, store); + } + } + + /** + * Get the non-terminal job associated with the given featureSet name and store name, if any. + */ + private Optional getJob(String sourceId, String storeName) { + List jobs = + jobInfoRepository.findBySourceIdAndStoreName(sourceId, storeName); + if (jobs.isEmpty()) { + return Optional.empty(); + } + return jobs.stream() + .filter(job -> !(JobStatus.getTerminalState().contains(job.getStatus()))) + .findFirst(); + } + + /** + * Start or update the job to ingest data to the sink. + */ + private JobInfo startJob( + String jobId, List featureSetSpecs, SourceProto.Source source, + StoreProto.Store sinkSpec) { + try { + AuditLogger.log( + Resource.JOB, + jobId, + Action.SUBMIT, + "Building graph and submitting to %s", + jobManager.getRunnerType().getName()); + + String extId = jobManager.startJob(jobId, featureSetSpecs, sinkSpec); + if (extId.isEmpty()) { + throw new RuntimeException( + String.format("Could not submit job: \n%s", "unable to retrieve job external id")); + } + + AuditLogger.log( + Resource.JOB, + jobId, + Action.STATUS_CHANGE, + "Job submitted to runner %s with ext id %s.", + jobManager.getRunnerType().getName(), + extId); + + List featureSets = new ArrayList<>(); + + for (FeatureSetSpec featureSetSpec : featureSetSpecs) { + FeatureSet featureSet = new FeatureSet(); + featureSet.setId(featureSetSpec.getName() + ":" + featureSetSpec.getVersion()); + featureSets.add(featureSet); + } + + JobInfo jobInfo = + new JobInfo( + jobId, + extId, + jobManager.getRunnerType().getName(), + Source.fromProto(source), + Store.fromProto(sinkSpec), + featureSets, + JobStatus.RUNNING); + + return jobInfoRepository.save(jobInfo); + } catch (Exception e) { + updateJobStatus(jobId, JobStatus.ERROR); + AuditLogger.log( + Resource.JOB, + jobId, + Action.STATUS_CHANGE, + "Job failed to be submitted to runner %s. 
Job status changed to ERROR.", + jobManager.getRunnerType().getName()); + throw new JobExecutionException(String.format("Error running ingestion job: %s", e), e); + } + } + + /** + * Update the given job + */ + private JobInfo updateJob( + JobInfo jobInfo, List featureSetSpecs, StoreProto.Store store) { + jobInfo.setFeatureSets( + featureSetSpecs.stream() + .map(spec -> FeatureSet.fromProto(spec)) + .collect(Collectors.toList())); + jobInfo.setStore(Store.fromProto(store)); + String extId = jobManager.updateJob(jobInfo); + jobInfo.setExtId(extId); + return jobInfoRepository.save(jobInfo); + } + + /** + * Drain the given job. If this is successful, the job will start the draining process. When the + * draining process is complete, the job will be cleaned up and removed. + * + *

Batch jobs will be cancelled, as draining these jobs is not supported by beam. + * + * @param id feast-internal id of a job + */ + public void abortJob(String id) { + Optional jobOptional = jobInfoRepository.findById(id); + if (!jobOptional.isPresent()) { + throw new RetrievalException(Strings.lenientFormat("Unable to retrieve job with id %s", id)); + } + JobInfo job = jobOptional.get(); + if (JobStatus.getTerminalState().contains(job.getStatus())) { + throw new IllegalStateException("Unable to stop job already in terminal state"); + } + jobManager.abortJob(job.getExtId()); + job.setStatus(JobStatus.ABORTING); + + AuditLogger.log(Resource.JOB, id, Action.ABORT, "Triggering draining of job"); + jobInfoRepository.saveAndFlush(job); + } + + /** + * Update a given job's status + */ + public void updateJobStatus(String jobId, JobStatus status) { + Optional jobRecordOptional = jobInfoRepository.findById(jobId); + if (jobRecordOptional.isPresent()) { + JobInfo jobRecord = jobRecordOptional.get(); + jobRecord.setStatus(status); + jobInfoRepository.save(jobRecord); + } + } + + public String createJobId(String sourceId, String storeName) { + String dateSuffix = String.valueOf(Instant.now().toEpochMilli()); + String sourceIdTrunc = sourceId.split("/")[0].toLowerCase(); + String jobId = String.format("%s-to-%s", sourceIdTrunc, storeName) + dateSuffix; + return jobId.replaceAll("_", "-"); + } +} diff --git a/core/src/main/java/feast/core/service/JobManagementService.java b/core/src/main/java/feast/core/service/JobManagementService.java deleted file mode 100644 index 5b2ee9217b6..00000000000 --- a/core/src/main/java/feast/core/service/JobManagementService.java +++ /dev/null @@ -1,303 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
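As a quick illustration of the job id scheme in the new `JobCoordinatorService.createJobId` above (a sketch: `jobCoordinator` stands in for the injected service, and the source id and store name are hypothetical):

```java
// The source id is in the "TYPE/options" form produced by Source.generateId().
String jobId = jobCoordinator.createJobId("KAFKA/localhost:9092/driver_features", "online_store");
// -> "kafka-to-online-store" + current epoch millis, e.g. "kafka-to-online-store1561531200000"
//    (the source type is lowercased and underscores are swapped for dashes)
```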
- * - */ - -package feast.core.service; - -import static com.google.common.base.Predicates.not; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; -import com.google.common.base.Strings; -import com.google.common.collect.Lists; -import com.google.gson.Gson; -import com.google.gson.reflect.TypeToken; -import com.google.protobuf.InvalidProtocolBufferException; -import com.google.protobuf.util.JsonFormat; -import feast.core.JobServiceProto.JobServiceTypes.JobDetail; -import feast.core.config.ImportJobDefaults; -import feast.core.config.StorageConfig.StorageSpecs; -import feast.core.dao.JobInfoRepository; -import feast.core.dao.MetricsRepository; -import feast.core.exception.JobExecutionException; -import feast.core.exception.RetrievalException; -import feast.core.job.JobManager; -import feast.core.job.Runner; -import feast.core.log.Action; -import feast.core.log.AuditLogger; -import feast.core.log.Resource; -import feast.core.model.EntityInfo; -import feast.core.model.FeatureInfo; -import feast.core.model.JobInfo; -import feast.core.model.JobStatus; -import feast.core.model.Metrics; -import feast.core.util.PathUtil; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.ImportJobSpecsProto.ImportJobSpecs; -import feast.specs.ImportSpecProto.Field; -import feast.specs.ImportSpecProto.ImportSpec; -import java.io.IOException; -import java.nio.file.FileAlreadyExistsException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.time.Instant; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -@Slf4j -@Service -public class JobManagementService { - - private static final String JOB_PREFIX_DEFAULT = "feastimport"; - private static final String UNKNOWN_EXT_JOB_ID = ""; - private static final String IMPORT_JOB_SPECS_FILENAME = "importJobSpecs.yaml"; - - private JobInfoRepository jobInfoRepository; - private MetricsRepository metricsRepository; - private JobManager jobManager; - private ImportJobDefaults defaults; - private SpecService specService; - private StorageSpecs storageSpecs; - - @Autowired - public JobManagementService( - JobInfoRepository jobInfoRepository, - MetricsRepository metricsRepository, - JobManager jobManager, - ImportJobDefaults defaults, - SpecService specService, - StorageSpecs storageSpecs) { - this.jobInfoRepository = jobInfoRepository; - this.metricsRepository = metricsRepository; - this.jobManager = jobManager; - this.defaults = defaults; - this.specService = specService; - this.storageSpecs = storageSpecs; - } - - public void writeImportJobSpecs(ImportJobSpecs importJobSpecs, Path workspace) { - Path destination = workspace.resolve(IMPORT_JOB_SPECS_FILENAME); - log.info("Writing ImportJobSpecs to {}", destination); - try { - String json = JsonFormat.printer().omittingInsignificantWhitespace().print(importJobSpecs); - TypeToken> typeToken = new TypeToken>() { - }; - Map objectMap = new Gson().fromJson(json, typeToken.getType()); - ObjectMapper yamlMapper = new ObjectMapper(new YAMLFactory()); - String yaml = yamlMapper.writer().writeValueAsString(objectMap); - 
Files.write(destination, Lists.newArrayList(yaml)); - } catch (JsonProcessingException | InvalidProtocolBufferException e) { - throw new JobExecutionException("Cannot serialise to ImportJobSpecs to YAML", e); - } catch (IOException e) { - throw new JobExecutionException( - String.format("Cannot write ImportJobSpecs to workspace %s", destination), e); - } - } - - private ImportJobSpecs buildImportJobSpecs(ImportSpec importSpec, String jobId) { - List entitySpecs = specService.getEntities(importSpec.getEntitiesList()) - .stream() - .map(EntityInfo::getEntitySpec) - .collect(Collectors.toList()); - Set featureIds = importSpec.getSchema().getFieldsList().stream() - .map(Field::getFeatureId).filter(not(Strings::isNullOrEmpty)).collect(Collectors.toSet()); - List featureSpecs = specService.getFeatures(Lists.newArrayList(featureIds)) - .stream() - .map(FeatureInfo::getFeatureSpec) - .collect(Collectors.toList()); - - ImportJobSpecs.Builder importJobSpecsBuilder = ImportJobSpecs.newBuilder() - .setJobId(jobId) - .setImportSpec(importSpec) - .addAllEntitySpecs(entitySpecs) - .addAllFeatureSpecs(featureSpecs); - if (storageSpecs.getServingStorageSpec() != null) { - importJobSpecsBuilder.setServingStorageSpec(storageSpecs.getServingStorageSpec()); - } - if (storageSpecs.getWarehouseStorageSpec() != null) { - importJobSpecsBuilder.setWarehouseStorageSpec(storageSpecs.getWarehouseStorageSpec()); - } - if (storageSpecs.getErrorsStorageSpec() != null) { - importJobSpecsBuilder.setErrorsStorageSpec(storageSpecs.getErrorsStorageSpec()); - } - - return importJobSpecsBuilder.build(); - } - - /** - * Lists all jobs registered to the db. - * - * @return list of JobDetails - */ - @Transactional - public List listJobs() { - List jobs = jobInfoRepository.findAll(); - return jobs.stream().map(JobInfo::getJobDetail).collect(Collectors.toList()); - } - - /** - * Gets information regarding a single job. - * - * @param id feast-internal job id - * @return JobDetail for that job - */ - @Transactional - public JobDetail getJob(String id) { - Optional job = jobInfoRepository.findById(id); - if (!job.isPresent()) { - throw new RetrievalException(Strings.lenientFormat("Unable to retrieve job with id %s", id)); - } - JobDetail.Builder jobDetailBuilder = job.get().getJobDetail().toBuilder(); - List metrics = metricsRepository.findByJobInfo_Id(id); - for (Metrics metric : metrics) { - jobDetailBuilder.putMetrics(metric.getName(), metric.getValue()); - } - return jobDetailBuilder.build(); - } - - - /** - * Submit ingestion job to runner. - * - * @param importSpec import spec of the ingestion job - * @param namePrefix name prefix of the ingestion job - * @return feast job ID. 
- */ - public String submitJob(ImportSpec importSpec, String namePrefix) { - String jobId = createJobId(namePrefix); - Path workspace = PathUtil.getPath(defaults.getWorkspace()).resolve(jobId); - try { - Files.createDirectory(workspace); - } catch (FileAlreadyExistsException e) { - } catch (IOException e) { - throw new RuntimeException( - String.format("Could not initialise job workspace job: %s", workspace.toString()), e); - } - ImportJobSpecs importJobSpecs = buildImportJobSpecs(importSpec, jobId); - writeImportJobSpecs(importJobSpecs, workspace); - - boolean isDirectRunner = Runner.DIRECT.getName().equals(defaults.getRunner()); - try { - if (!isDirectRunner) { - JobInfo jobInfo = - new JobInfo(jobId, UNKNOWN_EXT_JOB_ID, defaults.getRunner(), importSpec, - JobStatus.PENDING); - jobInfoRepository.save(jobInfo); - } - - AuditLogger.log( - Resource.JOB, - jobId, - Action.SUBMIT, - "Building graph and submitting to %s", - defaults.getRunner()); - - String extId = jobManager.submitJob(importJobSpecs, workspace); - if (extId.isEmpty()) { - throw new RuntimeException( - String.format("Could not submit job: \n%s", "unable to retrieve job external id")); - } - - AuditLogger.log( - Resource.JOB, - jobId, - Action.STATUS_CHANGE, - "Job submitted to runner %s with ext id %s.", - defaults.getRunner(), - extId); - - if (isDirectRunner) { - JobInfo jobInfo = - new JobInfo(jobId, extId, defaults.getRunner(), importSpec, JobStatus.COMPLETED); - jobInfoRepository.save(jobInfo); - } else { - updateJobExtId(jobId, extId); - } - return jobId; - } catch (Exception e) { - updateJobStatus(jobId, JobStatus.ERROR); - AuditLogger.log( - Resource.JOB, - jobId, - Action.STATUS_CHANGE, - "Job failed to be submitted to runner %s. Job status changed to ERROR.", - defaults.getRunner()); - throw new JobExecutionException(String.format("Error running ingestion job: %s", e), e); - } - } - - /** - * Drain the given job. If this is successful, the job will start the draining process. When the - * draining process is complete, the job will be cleaned up and removed. - * - *

Batch jobs will be cancelled, as draining these jobs is not supported by beam. - * - * @param id feast-internal id of a job - */ - public void abortJob(String id) { - Optional jobOptional = jobInfoRepository.findById(id); - if (!jobOptional.isPresent()) { - throw new RetrievalException(Strings.lenientFormat("Unable to retrieve job with id %s", id)); - } - JobInfo job = jobOptional.get(); - if (JobStatus.getTerminalState().contains(job.getStatus())) { - throw new IllegalStateException("Unable to stop job already in terminal state"); - } - jobManager.abortJob(job.getExtId()); - job.setStatus(JobStatus.ABORTING); - - AuditLogger.log(Resource.JOB, id, Action.ABORT, "Triggering draining of job"); - jobInfoRepository.saveAndFlush(job); - } - - /** - * Update a given job's status - */ - void updateJobStatus(String jobId, JobStatus status) { - Optional jobRecordOptional = jobInfoRepository.findById(jobId); - if (jobRecordOptional.isPresent()) { - JobInfo jobRecord = jobRecordOptional.get(); - jobRecord.setStatus(status); - jobInfoRepository.save(jobRecord); - } - } - - /** - * Update a given job's external id - */ - void updateJobExtId(String jobId, String jobExtId) { - Optional jobRecordOptional = jobInfoRepository.findById(jobId); - if (jobRecordOptional.isPresent()) { - JobInfo jobRecord = jobRecordOptional.get(); - jobRecord.setExtId(jobExtId); - jobInfoRepository.save(jobRecord); - } - } - - private String createJobId(String namePrefix) { - String dateSuffix = String.valueOf(Instant.now().toEpochMilli()); - return namePrefix.isEmpty() ? JOB_PREFIX_DEFAULT + dateSuffix : namePrefix + dateSuffix; - } -} diff --git a/core/src/main/java/feast/core/service/JobStatusService.java b/core/src/main/java/feast/core/service/JobStatusService.java new file mode 100644 index 00000000000..e47ebd39d39 --- /dev/null +++ b/core/src/main/java/feast/core/service/JobStatusService.java @@ -0,0 +1,64 @@ +package feast.core.service; + +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; + + +@Slf4j +@Service +public class JobStatusService { +// +// private JobInfoRepository jobInfoRepository; +// private MetricsRepository metricsRepository; +// +// @Autowired +// public JobStatusService( +// JobInfoRepository jobInfoRepository, +// MetricsRepository metricsRepository) { +// this.jobInfoRepository = jobInfoRepository; +// this.metricsRepository = metricsRepository; +// } +// +// /** +// * Lists all jobs registered to the db, sorted by provided orderBy +// * +// * @param orderBy list order +// * @return list of JobDetails +// */ +// @Transactional +// public List listJobs(Sort orderBy) { +// List jobs = jobInfoRepository.findAll(orderBy); +// return jobs.stream().map(JobInfo::getJobDetail).collect(Collectors.toList()); +// } +// +// /** +// * Lists all jobs registered to the db, sorted chronologically by creation time +// * +// * @return list of JobDetails +// */ +// @Transactional +// public List listJobs() { +// return listJobs(Sort.by(Sort.Direction.ASC, "created")); +// } +// +// /** +// * Gets information regarding a single job. 
+// * +// * @param id feast-internal job id +// * @return JobDetail for that job +// */ +// @Transactional +// public JobDetail getJob(String id) { +// Optional job = jobInfoRepository.findById(id); +// if (!job.isPresent()) { +// throw new RetrievalException(Strings.lenientFormat("Unable to retrieve job with id %s", id)); +// } +// JobDetail.Builder jobDetailBuilder = job.get().getJobDetail().toBuilder(); +// List metrics = metricsRepository.findByJobInfo_Id(id); +// for (Metrics metric : metrics) { +// jobDetailBuilder.putMetrics(metric.getName(), metric.getValue()); +// } +// return jobDetailBuilder.build(); +// } + +} diff --git a/core/src/main/java/feast/core/service/SpecService.java b/core/src/main/java/feast/core/service/SpecService.java index 168bc056d91..2b3403eaba8 100644 --- a/core/src/main/java/feast/core/service/SpecService.java +++ b/core/src/main/java/feast/core/service/SpecService.java @@ -17,33 +17,37 @@ package feast.core.service; -import com.google.common.base.Strings; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; -import com.google.protobuf.util.JsonFormat; -import feast.core.config.StorageConfig.StorageSpecs; -import feast.core.dao.EntityInfoRepository; -import feast.core.dao.FeatureGroupInfoRepository; -import feast.core.dao.FeatureInfoRepository; -import feast.core.exception.RegistrationException; +import static feast.core.validators.Matchers.checkValidCharacters; +import static feast.core.validators.Matchers.checkValidFeatureSetFilterName; + +import com.google.common.collect.Ordering; +import com.google.protobuf.InvalidProtocolBufferException; +import feast.core.CoreServiceProto.ApplyFeatureSetResponse; +import feast.core.CoreServiceProto.ApplyFeatureSetResponse.Status; +import feast.core.CoreServiceProto.GetFeatureSetRequest; +import feast.core.CoreServiceProto.GetFeatureSetResponse; +import feast.core.CoreServiceProto.ListFeatureSetsRequest; +import feast.core.CoreServiceProto.ListFeatureSetsResponse; +import feast.core.CoreServiceProto.ListStoresRequest; +import feast.core.CoreServiceProto.ListStoresResponse; +import feast.core.CoreServiceProto.ListStoresResponse.Builder; +import feast.core.CoreServiceProto.UpdateStoreRequest; +import feast.core.CoreServiceProto.UpdateStoreResponse; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.SourceProto; +import feast.core.StoreProto; +import feast.core.dao.FeatureSetRepository; +import feast.core.dao.StoreRepository; import feast.core.exception.RetrievalException; -import feast.core.log.Action; -import feast.core.log.AuditLogger; -import feast.core.log.Resource; -import feast.core.model.EntityInfo; -import feast.core.model.FeatureGroupInfo; -import feast.core.model.FeatureInfo; -import feast.core.model.StorageInfo; -import feast.core.storage.SchemaManager; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureGroupSpecProto.FeatureGroupSpec; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.StorageSpecProto.StorageSpec; -import java.util.HashMap; +import feast.core.model.FeatureSet; +import feast.core.model.Source; +import feast.core.model.Store; +import feast.core.validators.FeatureSetValidator; import java.util.List; -import java.util.Map; -import java.util.Set; -import lombok.Getter; +import java.util.function.Predicate; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import 
org.springframework.stereotype.Service; @@ -56,288 +60,257 @@ @Service public class SpecService { - private final EntityInfoRepository entityInfoRepository; - private final FeatureInfoRepository featureInfoRepository; - private final FeatureGroupInfoRepository featureGroupInfoRepository; - private final SchemaManager schemaManager; - @Getter - private final StorageSpecs storageSpecs; + private final FeatureSetRepository featureSetRepository; + private final StoreRepository storeRepository; + private final Source defaultSource; + + private final Pattern versionPattern = Pattern + .compile("^(?[\\>\\<\\=]{0,2})(?\\d*)$"); @Autowired public SpecService( - EntityInfoRepository entityInfoRegistry, - FeatureInfoRepository featureInfoRegistry, - FeatureGroupInfoRepository featureGroupInfoRepository, - SchemaManager schemaManager, - StorageSpecs storageSpecs) { - this.entityInfoRepository = entityInfoRegistry; - this.featureInfoRepository = featureInfoRegistry; - this.featureGroupInfoRepository = featureGroupInfoRepository; - this.schemaManager = schemaManager; - this.storageSpecs = storageSpecs; + FeatureSetRepository featureSetRepository, + StoreRepository storeRepository, + Source defaultSource) { + this.featureSetRepository = featureSetRepository; + this.storeRepository = storeRepository; + this.defaultSource = defaultSource; } /** - * Retrieve a set of entity infos from the registry. + * Get a feature set matching the feature name and version provided in the filter. The name + * is required. If the version is provided then it will be used for the lookup. If the version + * is omitted then the latest version will be returned. * - * @param ids - list of entity names - * @return a list of EntityInfos matching the ids given - * @throws RetrievalException if any of the requested ids is not found - * @throws IllegalArgumentException if the list of ids is empty + * @param GetFeatureSetRequest containing the name and version of the feature set + * @return GetFeatureSetResponse containing a single feature set */ - public List getEntities(List ids) { - if (ids.size() == 0) { - throw new IllegalArgumentException("ids cannot be empty"); - } - Set dedupIds = Sets.newHashSet(ids); + public GetFeatureSetResponse getFeatureSet(GetFeatureSetRequest request) + throws InvalidProtocolBufferException { - List entityInfos = this.entityInfoRepository.findAllById(dedupIds); - if (entityInfos.size() < dedupIds.size()) { - throw new RetrievalException( - "unable to retrieve all entities requested " + ids); - } - return entityInfos; - } + // Validate input arguments + checkValidCharacters(request.getName(), "featureSetName"); + if (request.getName().isEmpty()) { + throw io.grpc.Status.INVALID_ARGUMENT + .withDescription("No feature set name provided") + .asRuntimeException(); + } + if (request.getVersion() < 0){ + throw io.grpc.Status.INVALID_ARGUMENT + .withDescription("Version number cannot be less than 0") + .asRuntimeException(); + } - /** - * Retrieves all entities in the registry - * - * @return list of EntityInfos - * @throws RetrievalException if retrieval fails - */ - public List listEntities() { - return this.entityInfoRepository.findAll(); - } + // Find a list of feature sets with the requested name + List featureSets = featureSetRepository.findByNameWithWildcard(request.getName()); - /** - * Retrieve a set of feature infos from the registry. 
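To make the lookup semantics concrete, a sketch of the two ways the new `getFeatureSet` is typically called (the feature set name is hypothetical, `specService` stands in for the injected `SpecService`, and both calls declare `InvalidProtocolBufferException`):

```java
// Version omitted (defaults to 0): the service resolves it to the latest stored version.
GetFeatureSetResponse latest = specService.getFeatureSet(
    GetFeatureSetRequest.newBuilder()
        .setName("driver_features")
        .build());

// Version pinned: only a feature set with exactly this version is returned.
GetFeatureSetResponse pinned = specService.getFeatureSet(
    GetFeatureSetRequest.newBuilder()
        .setName("driver_features")
        .setVersion(2)
        .build());
```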
- * - * @param ids - list of feature ids - * @return a list of FeatureInfos matching the ids given - * @throws RetrievalException if any of the requested ids is not found - * @throws IllegalArgumentException if the list of ids is empty - */ - public List getFeatures(List ids) { - if (ids.size() == 0) { - throw new IllegalArgumentException("ids cannot be empty"); - } - Set dedupIds = Sets.newHashSet(ids); + // Filter the list based on version + if (request.getVersion() == 0){ + // Version is not set, filter list to latest version + featureSets = Ordering.natural().reverse() + .sortedCopy(featureSets).subList(0, featureSets.size() == 0 ? 0 : 1); + } else if(request.getVersion() > 0) { + // Version is set, find specific version + featureSets = featureSets.stream() + .filter(fs -> request.getVersion() == fs.getVersion()).collect(Collectors.toList()); + } - List featureInfos = this.featureInfoRepository.findAllById(dedupIds); - if (featureInfos.size() < dedupIds.size()) { - throw new RetrievalException( - "unable to retrieve all features requested: " + ids); - } - return featureInfos; - } + // Validate remaining items + if (featureSets.size() == 0){ + throw io.grpc.Status.NOT_FOUND + .withDescription("Feature set could not be found") + .asRuntimeException(); + } + if (featureSets.size() > 1){ + throw io.grpc.Status.INTERNAL + .withDescription(String.format("Multiple feature sets found with the name %s and " + + "version %s", request.getName(), request.getVersion())) + .asRuntimeException(); + } - /** - * Retrieves all features in the registry - * - * @return list of FeatureInfos - * @throws RetrievalException if retrieval fails - */ - public List listFeatures() { - return this.featureInfoRepository.findAll(); + // Only a single item in list, return successfully + return GetFeatureSetResponse.newBuilder().setFeatureSet(featureSets.get(0).toProto()).build(); } /** - * Retrieve a set of feature group infos from the registry. + * Get featureSets matching the feature name and version provided in the filter. If the feature + * name is not provided, the method will return all featureSets currently registered to Feast. + * + * The feature set name in the filter accepts any valid regex string. All matching featureSets + * will be returned. * - * @param ids - list of feature group ids - * @return a list of FeatureGroupInfos matching the ids given - * @throws RetrievalException if any of the requested ids is not found - * @throws IllegalArgumentException if the list of ids is empty + * The version filter is optional; If not provided, this method will return all featureSet + * versions of the featureSet name provided. Valid version filters should optionally contain a + * comparator (<, <=, >, etc) and a version number, e.g. 
10, <10, >=1 + * + * @param filter filter containing the desired featureSet name and version filter + * @return ListFeatureSetsResponse with list of featureSets found matching the filter */ - public List getFeatureGroups(List ids) { - if (ids.size() == 0) { - throw new IllegalArgumentException("ids cannot be empty"); + public ListFeatureSetsResponse listFeatureSets(ListFeatureSetsRequest.Filter filter) + throws InvalidProtocolBufferException { + String name = filter.getFeatureSetName(); + checkValidFeatureSetFilterName(name, "featureSetName"); + List featureSets; + if (name.equals("")) { + featureSets = featureSetRepository.findAll(); + } else { + featureSets = featureSetRepository.findByNameWithWildcard(name.replace('*', '%')); + featureSets = featureSets.stream().filter(getVersionFilter(filter.getFeatureSetVersion())) + .collect(Collectors.toList()); } - Set dedupIds = Sets.newHashSet(ids); - - List featureGroupInfos = this.featureGroupInfoRepository - .findAllById(dedupIds); - if (featureGroupInfos.size() < dedupIds.size()) { - throw new RetrievalException( - "unable to retrieve all feature groups requested " + dedupIds); + ListFeatureSetsResponse.Builder response = ListFeatureSetsResponse.newBuilder(); + for (FeatureSet featureSet : featureSets) { + response.addFeatureSets(featureSet.toProto()); } - return featureGroupInfos; + return response.build(); } /** - * Retrieves all feature groups in the registry + * Get stores matching the store name provided in the filter. If the store name is not provided, + * the method will return all stores currently registered to Feast. * - * @return list of FeatureGroupInfos - * @throws RetrievalException if retrieval fails + * @param filter filter containing the desired store name + * @return ListStoresResponse containing list of stores found matching the filter */ - public List listFeatureGroups() { - return this.featureGroupInfoRepository.findAll(); + public ListStoresResponse listStores(ListStoresRequest.Filter filter) { + try { + String name = filter.getName(); + if (name.equals("")) { + Builder responseBuilder = ListStoresResponse.newBuilder(); + for (Store store : storeRepository.findAll()) { + responseBuilder.addStore(store.toProto()); + } + return responseBuilder.build(); + } + Store store = storeRepository.findById(name) + .orElseThrow(() -> new RetrievalException(String.format("Store with name '%s' not found", + name))); + return ListStoresResponse.newBuilder() + .addStore(store.toProto()) + .build(); + } catch (InvalidProtocolBufferException e) { + throw io.grpc.Status.NOT_FOUND + .withDescription("Unable to retrieve stores") + .withCause(e) + .asRuntimeException(); + } } /** - * Retrieve a set of storage infos from the registry. + * Adds the featureSet to the repository, and prepares the sink for the feature creator to write + * to. If there is a change in the featureSet's schema or source, the featureSet version will be + * incremented. + * + * This function is idempotent. If no changes are detected in the incoming featureSet's schema, + * this method will update the incoming featureSet spec with the latest version stored in the + * repository, and return that. * - * @param ids - List of storage ids - * @return a list of StorageInfos matching the ids given - * @throws RetrievalException if any of the requested ids is not found - * @throws IllegalArgumentException if the list of ids is empty + * @param newFeatureSetSpec featureSet to add. 
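Per the contract just described, a sketch of two consecutive applies of an identical spec (a schematically minimal spec; real specs would also declare entities and features, and `specService` again stands in for the injected service):

```java
FeatureSetSpec spec = FeatureSetSpec.newBuilder()
    .setName("driver_features")
    .build();

// First apply: no prior versions exist, so the spec is stored as version 1.
ApplyFeatureSetResponse created = specService.applyFeatureSet(spec);
// created.getStatus() == Status.CREATED, version 1

// Second apply of the same schema: no change is detected, so no new version is created.
ApplyFeatureSetResponse unchanged = specService.applyFeatureSet(spec);
// unchanged.getStatus() == Status.NO_CHANGE, version still 1
```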
*/ - public List getStorage(List ids) { - if (ids.size() == 0) { - throw new IllegalArgumentException("ids cannot be empty"); - } - Set dedupIds = Sets.newHashSet(ids); - List storageInfos = Lists.newArrayList(); - StorageSpecs storageSpecs = getStorageSpecs(); - Map map = new HashMap<>(); - if (storageSpecs.getServingStorageSpec() != null) { - map.put(storageSpecs.getServingStorageSpec().getId(), storageSpecs.getServingStorageSpec()); - } - if (storageSpecs.getWarehouseStorageSpec() != null) { - map.put(storageSpecs.getWarehouseStorageSpec().getId(), storageSpecs.getWarehouseStorageSpec()); - } - for (String id : dedupIds) { - if (map.containsKey(id)) { - storageInfos.add(new StorageInfo(map.get(id))); + public ApplyFeatureSetResponse applyFeatureSet(FeatureSetSpec newFeatureSetSpec) + throws InvalidProtocolBufferException { + FeatureSetValidator.validateSpec(newFeatureSetSpec); + List existingFeatureSets = featureSetRepository + .findByName(newFeatureSetSpec.getName()); + if (existingFeatureSets.size() == 0) { + newFeatureSetSpec = newFeatureSetSpec.toBuilder().setVersion(1).build(); + + } else { + existingFeatureSets = Ordering.natural().reverse().sortedCopy(existingFeatureSets); + FeatureSet latest = existingFeatureSets.get(0); + FeatureSet featureSet = FeatureSet.fromProto(newFeatureSetSpec); + + // If the featureSet remains unchanged, we do nothing. + if (featureSet.equalTo(latest)) { + newFeatureSetSpec = newFeatureSetSpec.toBuilder() + .setVersion(latest.getVersion()) + .build(); + return ApplyFeatureSetResponse.newBuilder() + .setFeatureSet(newFeatureSetSpec) + .setStatus(Status.NO_CHANGE) + .build(); } + newFeatureSetSpec = newFeatureSetSpec.toBuilder() + .setVersion(latest.getVersion() + 1) + .build(); } - if (dedupIds.size() != storageInfos.size()) { - throw new RetrievalException( - "unable to retrieve all storage requested: " + ids); - + FeatureSet featureSet = FeatureSet.fromProto(newFeatureSetSpec); + if (newFeatureSetSpec.getSource() == SourceProto.Source.getDefaultInstance()) { + featureSet.setSource(defaultSource); } - return storageInfos; - } + featureSetRepository.saveAndFlush(featureSet); - /** - * Retrieves all storage specs in the registry - * - * @return list of StorageInfos - * @throws RetrievalException if retrieval fails - */ - public List listStorage() { - return Lists.newArrayList( - new StorageInfo(getStorageSpecs().getServingStorageSpec()), - new StorageInfo(getStorageSpecs().getWarehouseStorageSpec())); + return ApplyFeatureSetResponse.newBuilder() + .setFeatureSet(featureSet.toProto()) + .setStatus(Status.CREATED) + .build(); } /** - * Applies the given feature spec to the registry. If the feature does not yet exist, it will be - * registered to the system. If it does, the existing feature will be updated with the new - * information. - * - *

Note that specifications that will affect downstream resources (e.g. id, storage location) - * cannot be changed. + * UpdateStore updates the repository with the new given store. * - * @param spec FeatureSpec - * @return registered FeatureInfo - * @throws RegistrationException if registration fails + * @param updateStoreRequest containing the new store definition + * @return UpdateStoreResponse containing the new store definition + * @throws InvalidProtocolBufferException */ - public FeatureInfo applyFeature(FeatureSpec spec) { - try { - FeatureInfo featureInfo = featureInfoRepository.findById(spec.getId()).orElse(null); - Action action; - if (featureInfo != null) { - featureInfo.update(spec); - action = Action.UPDATE; - } else { - EntityInfo entity = entityInfoRepository.findById(spec.getEntity()).orElse(null); - FeatureGroupInfo featureGroupInfo = - featureGroupInfoRepository.findById(spec.getGroup()).orElse(null); - featureInfo = new FeatureInfo(spec, entity, featureGroupInfo); - FeatureInfo resolvedFeatureInfo = featureInfo.resolve(); - FeatureSpec resolvedFeatureSpec = resolvedFeatureInfo.getFeatureSpec(); - schemaManager.registerFeature(resolvedFeatureSpec); - action = Action.REGISTER; - } - FeatureInfo out = featureInfoRepository.saveAndFlush(featureInfo); - if (!out.getId().equals(spec.getId())) { - throw new RegistrationException("failed to register or update feature"); - } - AuditLogger.log( - Resource.FEATURE, - spec.getId(), - action, - "Feature applied: %s", - JsonFormat.printer().print(spec)); - return out; + public UpdateStoreResponse updateStore(UpdateStoreRequest updateStoreRequest) + throws InvalidProtocolBufferException { + StoreProto.Store newStoreProto = updateStoreRequest.getStore(); + Store existingStore = storeRepository.findById(newStoreProto.getName()).orElse(null); - } catch (Exception e) { - throw new RegistrationException( - Strings.lenientFormat("Failed to apply feature %s: %s", spec, e.getMessage()), e); + // Do nothing if no change + if (existingStore != null && existingStore.toProto().equals(newStoreProto)) { + return UpdateStoreResponse.newBuilder() + .setStatus(UpdateStoreResponse.Status.NO_CHANGE) + .setStore(updateStoreRequest.getStore()) + .build(); } + + Store newStore = Store.fromProto(newStoreProto); + storeRepository.save(newStore); + return UpdateStoreResponse.newBuilder() + .setStatus(UpdateStoreResponse.Status.UPDATED) + .setStore(updateStoreRequest.getStore()) + .build(); } - /** - * Applies the given feature group spec to the registry. If the entity does not yet exist, it will - * be registered to the system. Otherwise, the fields will be updated as per the given feature - * group spec. 
- * - * @param spec FeatureGroupSpec - * @return registered FeatureGroupInfo - * @throws RegistrationException if registration fails - */ - public FeatureGroupInfo applyFeatureGroup(FeatureGroupSpec spec) { - try { - FeatureGroupInfo featureGroupInfo = - featureGroupInfoRepository.findById(spec.getId()).orElse(null); - Action action; - if (featureGroupInfo != null) { - featureGroupInfo.update(spec); - action = Action.UPDATE; - } else { - featureGroupInfo = new FeatureGroupInfo(spec); - action = Action.REGISTER; - } - FeatureGroupInfo out = featureGroupInfoRepository.saveAndFlush(featureGroupInfo); - if (!out.getId().equals(spec.getId())) { - throw new RegistrationException("failed to register or update feature group"); - } - AuditLogger.log( - Resource.FEATURE_GROUP, - spec.getId(), - action, - "Feature group applied: %s", - JsonFormat.printer().print(spec)); - return out; - } catch (Exception e) { - throw new RegistrationException( - Strings.lenientFormat( - "Failed to register new feature group %s: %s", spec, e.getMessage()), - e); + private Predicate getVersionFilter(String versionFilter) { + if (versionFilter.equals("")) { + return v -> true; } - } + Matcher match = versionPattern.matcher(versionFilter); + match.find(); - /** - * Applies the given entity spec to the registry. If the entity does not yet exist, it will be - * registered to the system. Otherwise, the fields will be updated as per the given entity spec. - * - * @param spec EntitySpec - * @return registered EntityInfo - * @throws RegistrationException if registration fails - */ - public EntityInfo applyEntity(EntitySpec spec) { - try { - EntityInfo entityInfo = entityInfoRepository.findById(spec.getName()).orElse(null); - Action action; - if (entityInfo != null) { - entityInfo.update(spec); - action = Action.UPDATE; - } else { - entityInfo = new EntityInfo(spec); - action = Action.REGISTER; - } - EntityInfo out = entityInfoRepository.saveAndFlush(entityInfo); - if (!out.getName().equals(spec.getName())) { - throw new RegistrationException("failed to register or update entity"); - } - AuditLogger.log( - Resource.FEATURE_GROUP, spec.getName(), action, "Entity: %s", - JsonFormat.printer().print(spec)); - return out; - } catch (Exception e) { - throw new RegistrationException( - Strings.lenientFormat("Failed to apply entity %s: %s", spec, e.getMessage()), e); + if (!match.matches()) { + throw io.grpc.Status.INVALID_ARGUMENT + .withDescription(String.format( + "Invalid version string '%s' provided. Version string may either " + + "be a fixed version, e.g. 10, or contain a comparator, e.g. >10.", + versionFilter)) + .asRuntimeException(); + } + + int versionNumber = Integer.valueOf(match.group("version")); + String comparator = match.group("comparator"); + switch (comparator) { + case "<": + return v -> v.getVersion() < versionNumber; + case ">": + return v -> v.getVersion() > versionNumber; + case "<=": + return v -> v.getVersion() <= versionNumber; + case ">=": + return v -> v.getVersion() >= versionNumber; + case "": + return v -> v.getVersion() == versionNumber; + default: + throw io.grpc.Status.INVALID_ARGUMENT + .withDescription(String.format( + "Invalid comparator '%s' provided. Version string may either " + + "be a fixed version, e.g. 10, or contain a comparator, e.g. 
>10.", + comparator)) + .asRuntimeException(); } } + } diff --git a/core/src/main/java/feast/core/storage/BigQueryStorageManager.java b/core/src/main/java/feast/core/storage/BigQueryStorageManager.java deleted file mode 100644 index 00f64f8a86e..00000000000 --- a/core/src/main/java/feast/core/storage/BigQueryStorageManager.java +++ /dev/null @@ -1,214 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.storage; - -import com.google.cloud.bigquery.*; -import com.google.cloud.bigquery.TimePartitioning.Type; -import com.google.common.base.Strings; -import com.google.protobuf.util.JsonFormat; -import feast.core.log.Action; -import feast.core.log.AuditLogger; -import feast.core.log.Resource; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.types.ValueProto.ValueType; -import feast.types.ValueProto.ValueType.Enum; -import lombok.extern.slf4j.Slf4j; - -import java.util.*; -import java.util.stream.Collectors; - -@Slf4j -public class BigQueryStorageManager implements StorageManager { - - public static final String TYPE = "bigquery"; - - public static final String OPT_BIGQUERY_PROJECT = "project"; - public static final String OPT_BIGQUERY_DATASET = "dataset"; - public static final String OPT_BIGQUERY_TEMP_LOCATION = "tempLocation"; // gcs or local. - - private static final String FIELD_ID = "id"; - private static final String FIELD_EVENT_TIMESTAMP = "event_timestamp"; - private static final String FIELD_CREATED_TIMESTAMP = "created_timestamp"; - private static final String FIELD_JOB_ID = "job_id"; - - private String id; - private final BigQuery bigQuery; - private final String datasetName; - private final String project; - private final BigQueryViewTemplater viewTemplater; - - /** - * Mapping of Bigquery types to feast supported types - */ - private static final Map FEAST_TO_BIGQUERY_TYPE_MAP = - new HashMap<>(); - - static { - FEAST_TO_BIGQUERY_TYPE_MAP.put(Enum.BOOL, LegacySQLTypeName.BOOLEAN); - FEAST_TO_BIGQUERY_TYPE_MAP.put(Enum.INT32, LegacySQLTypeName.INTEGER); - FEAST_TO_BIGQUERY_TYPE_MAP.put(Enum.INT64, LegacySQLTypeName.INTEGER); - FEAST_TO_BIGQUERY_TYPE_MAP.put(Enum.BYTES, LegacySQLTypeName.BYTES); - FEAST_TO_BIGQUERY_TYPE_MAP.put(Enum.FLOAT, LegacySQLTypeName.FLOAT); - FEAST_TO_BIGQUERY_TYPE_MAP.put(Enum.DOUBLE, LegacySQLTypeName.FLOAT); - FEAST_TO_BIGQUERY_TYPE_MAP.put(Enum.TIMESTAMP, LegacySQLTypeName.TIMESTAMP); - FEAST_TO_BIGQUERY_TYPE_MAP.put(Enum.STRING, LegacySQLTypeName.STRING); - } - - public BigQueryStorageManager( - String id, - BigQuery bigQuery, - String project, - String datasetName, - BigQueryViewTemplater viewTemplater) { - this.id = id; - this.bigQuery = bigQuery; - this.datasetName = datasetName; - this.project = project; - this.viewTemplater = viewTemplater; - } - - /** - * Update the BigQuery schema of this table given the addition of this feature. - * - * @param featureSpec specification of the new feature. 
- */ - @Override - public void registerNewFeature(FeatureSpec featureSpec) { - if (!isDatasetExists(datasetName)) { - DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).build(); - bigQuery.create(datasetInfo); - } - - String tableName = createTableName(featureSpec); - TableId tableId = TableId.of(datasetName, tableName); - - Table table = bigQuery.getTable(tableId); - if (table == null) { - Schema schema = - Schema.of( - createField(FIELD_ID, Enum.STRING, ""), - createField(FIELD_EVENT_TIMESTAMP, Enum.TIMESTAMP, FIELD_EVENT_TIMESTAMP), - createField(FIELD_CREATED_TIMESTAMP, Enum.TIMESTAMP, FIELD_CREATED_TIMESTAMP), - createField(FIELD_JOB_ID, Enum.STRING, FIELD_JOB_ID), - createFeatureField(featureSpec)); - TableDefinition tableDefinition = - StandardTableDefinition.newBuilder() - .setSchema(schema) - .setTimePartitioning(TimePartitioning.of(Type.DAY)) - .build(); - TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build(); - checkTableCreation(bigQuery.create(tableInfo), featureSpec); - - createOrUpdateView(tableName, Arrays.asList(featureSpec.getName())); - return; - } - - Schema existingSchema = table.getDefinition().getSchema(); - Field newField = createFeatureField(featureSpec); - if (isFieldExist(newField, existingSchema)) { - return; - } - - List fields = new ArrayList<>(existingSchema.getFields()); - fields.add(newField); - Schema newSchema = Schema.of(fields); - TableDefinition tableDefinition = StandardTableDefinition.of(newSchema); - TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build(); - checkTableCreation(bigQuery.update(tableInfo), featureSpec); - createOrUpdateView( - tableName, - fields - .stream() - .map(Field::getName) - .filter( - f -> - !f.equals(FIELD_ID) - && !f.equals(FIELD_CREATED_TIMESTAMP) - && !f.equals(FIELD_EVENT_TIMESTAMP) - && !f.equals(FIELD_JOB_ID)) - - .collect(Collectors.toList())); - AuditLogger.log( - Resource.STORAGE, - this.id, - Action.SCHEMA_UPDATE, - "Bigquery schema updated for feature %s", - featureSpec.getId()); - } - - private void checkTableCreation(Table table, FeatureSpec featureSpec) { - if (table == null) { - throw new StorageInitializationException( - Strings.lenientFormat( - "Bigquery table creation failed. Possibly linked to BQ rate limiting, please try again later.")); - } else { - FieldList fields = table.getDefinition().getSchema().getFields(); - List fieldNames = fields.stream().map(Field::getName).collect(Collectors.toList()); - if (!fieldNames.contains(featureSpec.getName())) { - throw new StorageInitializationException( - Strings.lenientFormat( - "Bigquery table creation failed. 
Possibly linked to BQ rate limiting, please try again later.")); - } - } - } - - private boolean isFieldExist(Field field, Schema existingSchema) { - for (Field existingField : existingSchema.getFields()) { - if (field.getName().equals(existingField.getName())) { - return true; - } - } - return false; - } - - private Field createField(String name, Enum valueType, String description) { - Field.Builder fieldBuilder = Field.newBuilder(name, FEAST_TO_BIGQUERY_TYPE_MAP.get(valueType)); - if (description != null) { - fieldBuilder.setDescription(description); - } - return fieldBuilder.build(); - } - - private Field createFeatureField(FeatureSpec featureSpec) { - return createField( - featureSpec.getName(), featureSpec.getValueType(), featureSpec.getDescription()); - } - - private String createTableName(FeatureSpec featureSpec) { - String entityName = featureSpec.getEntity().toLowerCase(); - return String.format("%s", entityName); - } - - private void createOrUpdateView(String tableName, List features) { - String query = viewTemplater.getViewQuery(project, datasetName, tableName, features); - String viewName = String.join("_", tableName, "view"); - if (isViewExists(datasetName, viewName)) { - bigQuery.update(TableInfo.of(TableId.of(datasetName, viewName), ViewDefinition.of(query))); - return; - } - bigQuery.create(TableInfo.of(TableId.of(datasetName, viewName), ViewDefinition.of(query))); - } - - private boolean isViewExists(String datasetName, String viewName) { - return bigQuery.getTable(TableId.of(datasetName, viewName)) != null; - } - - private boolean isDatasetExists(String datasetName) { - return bigQuery.getDataset(datasetName) != null; - } -} diff --git a/core/src/main/java/feast/core/storage/BigQueryViewTemplater.java b/core/src/main/java/feast/core/storage/BigQueryViewTemplater.java deleted file mode 100644 index 0a705f57d0d..00000000000 --- a/core/src/main/java/feast/core/storage/BigQueryViewTemplater.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.core.storage; - -import com.github.mustachejava.DefaultMustacheFactory; -import com.github.mustachejava.Mustache; -import com.github.mustachejava.MustacheFactory; - -import java.io.StringReader; -import java.io.StringWriter; -import java.util.List; -import java.util.stream.Collectors; - -/** - * Generates the query for creation or update of a bigquery view - */ -public class BigQueryViewTemplater { - private final Mustache template; - - public BigQueryViewTemplater(String templateString) { - MustacheFactory mf = new DefaultMustacheFactory(); - this.template = mf.compile(new StringReader(templateString), "bqViewTemplate"); - } - - static class TemplateValues { - String project; - String dataset; - String tableName; - List features; - - TemplateValues(String projectId, String dataset, String tableName, List features) { - this.project = projectId; - this.dataset = dataset; - this.tableName = tableName; - this.features = features.stream().map(Feature::new).collect(Collectors.toList()); - } - } - - static class Feature { - String name; - - Feature(String name) { - this.name = name; - } - } - - /** - * Get a query for building or updating the Bigquery view given a set of parameters - * @param projectId BQ google project id - * @param dataset Desired BQ dataset - * @param tableName Name of table to update - * @param features List of features to include in view - * @return BQ view creation query string - */ - public String getViewQuery( - String projectId, String dataset, String tableName, List features) { - TemplateValues values = new TemplateValues(projectId, dataset, tableName, features); - StringWriter writer = new StringWriter(); - template.execute(writer, values); - return writer.toString(); - } -} diff --git a/core/src/main/java/feast/core/storage/BigTableStorageManager.java b/core/src/main/java/feast/core/storage/BigTableStorageManager.java deleted file mode 100644 index f21b3b483d6..00000000000 --- a/core/src/main/java/feast/core/storage/BigTableStorageManager.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.core.storage; - -import com.google.cloud.bigtable.hbase.BigtableConfiguration; -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import feast.core.log.Action; -import feast.core.log.AuditLogger; -import feast.core.log.Resource; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.StorageSpecProto.StorageSpec; -import java.io.IOException; -import lombok.extern.slf4j.Slf4j; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.Admin; -import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; -import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; -import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hbase.client.TableDescriptor; -import org.apache.hadoop.hbase.client.TableDescriptorBuilder; - -@Slf4j -public class BigTableStorageManager implements StorageManager { - - public static final String TYPE = "bigtable"; - public static final String STORE_OPT_BIGTABLE_PROJECT = "project"; - public static final String STORE_OPT_BIGTABLE_INSTANCE = "instance"; - public static final String STORE_OPT_BIGTABLE_TABLE_PREFIX = "tablePrefix"; - - public static final String STORE_OPT_BIGTABLE_TABLE_COLUMN_FAMILY = "family"; - public static final String FEATURE_OPT_BIGTABLE_TABLE_COLUMN_FAMILY = "bigtable.family"; - private static final String DEFAULT_COLUMN_FAMILY = "default"; - - private final StorageSpec storageSpec; - private transient Connection connection; - - public BigTableStorageManager(StorageSpec storageSpec) { - Preconditions.checkArgument(storageSpec.getType().equals(TYPE)); - this.storageSpec = storageSpec; - } - - Connection getConnection() { - if (connection == null) { - String projectId = storageSpec.getOptionsOrThrow(STORE_OPT_BIGTABLE_PROJECT); - String instanceId = storageSpec.getOptionsOrThrow(STORE_OPT_BIGTABLE_INSTANCE); - connection = BigtableConfiguration.connect(projectId, instanceId); - } - return connection; - } - - /** - * Update the Bigtable schema given the addition of a new feature - * - * @param featureSpec specification of the new feature. 
- */ - @Override - public void registerNewFeature(FeatureSpec featureSpec) { - String entityName = featureSpec.getEntity(); - String columnFamily = featureSpec - .getOptionsOrDefault(FEATURE_OPT_BIGTABLE_TABLE_COLUMN_FAMILY, null); - if (Strings.isNullOrEmpty(columnFamily)) { - columnFamily = storageSpec - .getOptionsOrDefault(STORE_OPT_BIGTABLE_TABLE_COLUMN_FAMILY, DEFAULT_COLUMN_FAMILY); - } - String tablePrefix = storageSpec.getOptionsOrDefault(STORE_OPT_BIGTABLE_TABLE_PREFIX, ""); - String tableNameString = tablePrefix + entityName; - - try (Admin admin = getConnection().getAdmin()) { - TableName tableName = TableName.valueOf(tableNameString.getBytes()); - - if (!admin.tableExists(tableName)) { - TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName).build(); - admin.createTable(tableDescriptor); - log.info("Created new table for entity: {}", entityName); - } - - TableDescriptor tableDescriptor = admin.getDescriptor(tableName); - if (!isColumnFamilyExist(tableDescriptor, columnFamily)) { - ColumnFamilyDescriptor cfDesc = ColumnFamilyDescriptorBuilder - .newBuilder(columnFamily.getBytes()) - .build(); - - admin.addColumnFamily(tableName, cfDesc); - log.info("Created new column family: {} for entity: {}", columnFamily, entityName); - } - AuditLogger.log( - Resource.STORAGE, - storageSpec.getId(), - Action.SCHEMA_UPDATE, - "Bigtable schema updated for feature %s", - featureSpec.getId()); - } catch (IOException e) { - log.error("Unable to create table in BigTable: {}", e); - throw new StorageInitializationException("Unable to create table in BigTable", e); - } - } - - private boolean isColumnFamilyExist(TableDescriptor tableDescriptor, String columnFamilyName) { - return tableDescriptor.getColumnFamily(columnFamilyName.getBytes()) != null; - } -} diff --git a/core/src/main/java/feast/core/storage/JsonFileStorageManager.java b/core/src/main/java/feast/core/storage/JsonFileStorageManager.java deleted file mode 100644 index 0db53260268..00000000000 --- a/core/src/main/java/feast/core/storage/JsonFileStorageManager.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.storage; - -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.StorageSpecProto.StorageSpec; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -public class JsonFileStorageManager implements StorageManager { - - public static final String TYPE = "file.json"; - public static final String OPT_FILE_PATH = "path"; - - public JsonFileStorageManager( - StorageSpec storageSpec) { - Preconditions.checkArgument(storageSpec.getType().equals(TYPE)); - Preconditions - .checkArgument( - !Strings.isNullOrEmpty(storageSpec.getOptionsOrDefault(OPT_FILE_PATH, null))); - } - - /** - * Update the BigQuery schema of this table given the addition of this feature. 
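Stepping back to the BigTableStorageManager removed above: a hedged sketch of the specs that drive its registerNewFeature. All ids, option values and entity/feature names are hypothetical; only the option keys and the "bigtable" type string come from the constants declared in that class.

// Hypothetical specs; keys follow STORE_OPT_* and FEATURE_OPT_BIGTABLE_TABLE_COLUMN_FAMILY above.
StorageSpec bigtableSpec = StorageSpec.newBuilder()
    .setId("BIGTABLE1")
    .setType(BigTableStorageManager.TYPE)          // "bigtable"
    .putOptions("project", "my-gcp-project")
    .putOptions("instance", "feast-bigtable")
    .putOptions("tablePrefix", "feast_")
    .build();
FeatureSpec featureSpec = FeatureSpec.newBuilder()
    .setEntity("driver")
    .setName("total_trips")
    .putOptions("bigtable.family", "driver_features")
    .build();
new BigTableStorageManager(bigtableSpec).registerNewFeature(featureSpec);
// Expected effect: table "feast_driver" exists with column family "driver_features".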
- * - * @param featureSpec specification of the new feature. - */ - @Override - public void registerNewFeature(FeatureSpec featureSpec) { - - } -} diff --git a/core/src/main/java/feast/core/storage/PostgresStorageManager.java b/core/src/main/java/feast/core/storage/PostgresStorageManager.java deleted file mode 100644 index 8875510d174..00000000000 --- a/core/src/main/java/feast/core/storage/PostgresStorageManager.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.storage; - -import feast.core.log.Action; -import feast.core.log.AuditLogger; -import feast.core.log.Resource; -import org.jdbi.v3.core.Jdbi; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.types.ValueProto.ValueType.Enum; - -import java.util.HashMap; -import java.util.Map; - -public class PostgresStorageManager implements StorageManager { - - public static final String TYPE = "postgres"; - - public static final String OPT_POSTGRES_URI = "uri"; // jdbc connection URI - public static final String OPT_POSTGRES_TABLE_PREFIX = "tablePrefix"; - - private String id; - private final String connectionUri; - private static final Map FEAST_TO_POSTGRES_TYPE_MAP = new HashMap<>(); - - static { - FEAST_TO_POSTGRES_TYPE_MAP.put(Enum.BOOL, "BOOLEAN"); - FEAST_TO_POSTGRES_TYPE_MAP.put(Enum.INT32, "INTEGER"); - FEAST_TO_POSTGRES_TYPE_MAP.put(Enum.INT64, "BIGINT"); - FEAST_TO_POSTGRES_TYPE_MAP.put(Enum.BYTES, "VARBINARY"); - FEAST_TO_POSTGRES_TYPE_MAP.put(Enum.FLOAT, "REAL"); - FEAST_TO_POSTGRES_TYPE_MAP.put(Enum.DOUBLE, "DOUBLE"); - FEAST_TO_POSTGRES_TYPE_MAP.put(Enum.TIMESTAMP, "TIMESTAMP"); - FEAST_TO_POSTGRES_TYPE_MAP.put(Enum.STRING, "VARCHAR"); - } - - private static final String CREATE_TABLE_TEMPLATE = - "CREATE TABLE IF NOT EXISTS %s ( " - + "id VARCHAR(255) PRIMARY KEY NOT NULL , timestamp TIMESTAMP )"; - private static final String INSERT_COLUMN_TEMPLATE = - "ALTER TABLE %s ADD COLUMN IF NOT EXISTS %s %s"; - - public PostgresStorageManager(String id, String connectionUri) { - this.id = id; - this.connectionUri = connectionUri; - } - - /** - * Update the schema of this table given the addition of this feature. - * @param featureSpec specification of the new feature. 
- */ - @Override - public void registerNewFeature(FeatureSpec featureSpec) { - String tableName = createTableName(featureSpec); - String column = featureSpec.getName(); - String fieldType = createFieldType(featureSpec); - - Jdbi jdbi = Jdbi.create(connectionUri); - jdbi.withHandle( - handle -> { - handle.execute(String.format(CREATE_TABLE_TEMPLATE, tableName)); - handle.execute(String.format(INSERT_COLUMN_TEMPLATE, tableName, column, fieldType)); - return null; - }); - AuditLogger.log( - Resource.STORAGE, - this.id, - Action.SCHEMA_UPDATE, - "Postgres schema updated for feature %s", - featureSpec.getId()); - } - - private String createFieldType(FeatureSpec featureSpec) { - return FEAST_TO_POSTGRES_TYPE_MAP.get(featureSpec.getValueType()); - } - - private String createTableName(FeatureSpec featureSpec) { - String entityName = featureSpec.getEntity().toLowerCase(); - return String.format("%s", entityName); - } -} diff --git a/core/src/main/java/feast/core/storage/RedisStorageManager.java b/core/src/main/java/feast/core/storage/RedisStorageManager.java deleted file mode 100644 index 41f9d863eab..00000000000 --- a/core/src/main/java/feast/core/storage/RedisStorageManager.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.storage; - -import feast.specs.FeatureSpecProto.FeatureSpec; - -public class RedisStorageManager implements StorageManager { - public static final String TYPE = "redis"; - - public static final String OPT_REDIS_HOST = "host"; - public static final String OPT_REDIS_PORT = "port"; - private final String id; - - public RedisStorageManager(String id) { - this.id = id; - } - - @Override - public void registerNewFeature(FeatureSpec featureSpec) { - // do nothing - } -} diff --git a/core/src/main/java/feast/core/storage/SchemaManager.java b/core/src/main/java/feast/core/storage/SchemaManager.java deleted file mode 100644 index c70fb5122b6..00000000000 --- a/core/src/main/java/feast/core/storage/SchemaManager.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
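To make the SQL templates in the deleted PostgresStorageManager above concrete, here is roughly what registerNewFeature would execute for a hypothetical INT64 feature total_trips on entity driver; the JDBC URI and names are placeholders.

import org.jdbi.v3.core.Jdbi;

// Mirrors CREATE_TABLE_TEMPLATE / INSERT_COLUMN_TEMPLATE above; INT64 maps to BIGINT.
Jdbi jdbi = Jdbi.create("jdbc:postgresql://localhost:5432/postgres");
jdbi.withHandle(handle -> {
  handle.execute(
      "CREATE TABLE IF NOT EXISTS driver ( id VARCHAR(255) PRIMARY KEY NOT NULL , timestamp TIMESTAMP )");
  handle.execute("ALTER TABLE driver ADD COLUMN IF NOT EXISTS total_trips BIGINT");
  return null;
});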
- * - */ - -package feast.core.storage; - -import com.google.cloud.bigquery.BigQuery; -import com.google.cloud.bigquery.BigQueryOptions; -import com.google.common.base.Preconditions; -import feast.core.config.StorageConfig.StorageSpecs; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.StorageSpecProto.StorageSpec; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -public class SchemaManager { - - private final Map storageRegistry = new ConcurrentHashMap<>(); - private final BigQueryViewTemplater viewTemplater; - private final StorageSpecs storageSpecs; - - public SchemaManager(BigQueryViewTemplater viewTemplater, StorageSpecs storageSpecs) { - this.viewTemplater = viewTemplater; - this.storageSpecs = storageSpecs; - if (storageSpecs.getServingStorageSpec() != null) { - registerStorage(storageSpecs.getServingStorageSpec()); - } - if (storageSpecs.getWarehouseStorageSpec() != null) { - registerStorage(storageSpecs.getWarehouseStorageSpec()); - } - } - - /** - * Prepare warehouse and serving storage for the feature. - * - * @param featureSpec spec of the new feature. - */ - public void registerFeature(FeatureSpec featureSpec) { - Preconditions.checkNotNull(storageSpecs.getServingStorageSpec(), - "Attempted to register feature but no serving storage is configured"); - StorageManager servingStorageManager = storageRegistry - .get(storageSpecs.getServingStorageSpec().getId()); - Preconditions.checkNotNull(servingStorageManager, - "Serving storage spec has no associated storage manager"); - servingStorageManager.registerNewFeature(featureSpec); - - if (storageSpecs.getWarehouseStorageSpec() != null) { - StorageManager warehouseStorageManager = storageRegistry - .get(storageSpecs.getWarehouseStorageSpec().getId()); - Preconditions.checkNotNull(warehouseStorageManager, - "Warehouse storage spec has no associated storage manager"); - warehouseStorageManager.registerNewFeature(featureSpec); - } - } - - /** - * Register new storage. - * - * @param storageSpec new storage spec. 
- */ - public void registerStorage(StorageSpec storageSpec) { - Map options = storageSpec.getOptionsMap(); - String id = storageSpec.getId(); - StorageManager storageManager = null; - - switch (storageSpec.getType()) { - case BigTableStorageManager.TYPE: - storageManager = new BigTableStorageManager(storageSpec); - break; - case BigQueryStorageManager.TYPE: - String datasetName = options.get(BigQueryStorageManager.OPT_BIGQUERY_DATASET); - String bqProjectId = options.get(BigQueryStorageManager.OPT_BIGQUERY_PROJECT); - BigQuery bigQuery = - BigQueryOptions.newBuilder().setProjectId(bqProjectId).build().getService(); - storageManager = - new BigQueryStorageManager(id, bigQuery, bqProjectId, datasetName, viewTemplater); - break; - case JsonFileStorageManager.TYPE: - storageManager = new JsonFileStorageManager(storageSpec); - break; - case PostgresStorageManager.TYPE: - String connectionUri = options.get(PostgresStorageManager.OPT_POSTGRES_URI); - storageManager = new PostgresStorageManager(id, connectionUri); - break; - case RedisStorageManager.TYPE: - storageManager = new RedisStorageManager(id); - break; - default: - log.warn("Unknown storage type: {} \n {}", storageSpec.getType(), storageSpec); - return; - } - storageRegistry.put(id, storageManager); - } -} diff --git a/core/src/main/java/feast/core/storage/StorageManager.java b/core/src/main/java/feast/core/storage/StorageManager.java deleted file mode 100644 index b41d0883c60..00000000000 --- a/core/src/main/java/feast/core/storage/StorageManager.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.storage; - -import feast.specs.FeatureSpecProto.FeatureSpec; - -public interface StorageManager { - /** - * Register a new feature into storage. - * - * @param featureSpec specification of the new feature. - */ - void registerNewFeature(FeatureSpec featureSpec); -} diff --git a/core/src/main/java/feast/core/training/BigQueryDatasetTemplater.java b/core/src/main/java/feast/core/training/BigQueryDatasetTemplater.java deleted file mode 100644 index 7edf453b5a1..00000000000 --- a/core/src/main/java/feast/core/training/BigQueryDatasetTemplater.java +++ /dev/null @@ -1,171 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
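For the type dispatch in SchemaManager.registerStorage above, a hedged sketch of registering a Redis serving store; the id, host and port are hypothetical, and schemaManager is assumed to be an already-constructed SchemaManager.

// "redis" is the RedisStorageManager.TYPE branch of the switch above.
StorageSpec servingSpec = StorageSpec.newBuilder()
    .setId("SERVING")
    .setType("redis")
    .putOptions("host", "localhost")
    .putOptions("port", "6379")
    .build();
schemaManager.registerStorage(servingSpec);
// A RedisStorageManager now sits in the registry under "SERVING"; registerFeature(...)
// would dispatch to it when this spec is the configured serving store.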
- * - */ -package feast.core.training; - -import com.google.protobuf.Timestamp; -import com.hubspot.jinjava.Jinjava; -import feast.core.DatasetServiceProto.FeatureSet; -import feast.core.dao.FeatureInfoRepository; -import feast.core.model.FeatureInfo; -import feast.core.storage.BigQueryStorageManager; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.types.ValueProto.ValueType.Enum; -import java.time.Instant; -import java.time.ZoneId; -import java.time.format.DateTimeFormatter; -import java.time.temporal.ChronoUnit; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; -import java.util.Set; -import java.util.stream.Collectors; - -public class BigQueryDatasetTemplater { - - private final FeatureInfoRepository featureInfoRepository; - private final Jinjava jinjava; - private final String template; - private final StorageSpec storageSpec; - private final DateTimeFormatter formatter; - - public BigQueryDatasetTemplater( - Jinjava jinjava, - String templateString, - StorageSpec storageSpec, - FeatureInfoRepository featureInfoRepository) { - this.storageSpec = storageSpec; - this.featureInfoRepository = featureInfoRepository; - this.jinjava = jinjava; - this.template = templateString; - this.formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd").withZone(ZoneId.of("UTC")); - } - - protected StorageSpec getStorageSpec() { - return storageSpec; - } - - /** - * Create query from a template. - * - * @param featureSet feature set - * @param startDate start date - * @param endDate end date - * @param limit limit - * @param filters additional WHERE clause - * @return SQL query for creating training table. - */ - String createQuery( - FeatureSet featureSet, - Timestamp startDate, - Timestamp endDate, - long limit, - Map filters) { - List featureIds = featureSet.getFeatureIdsList(); - List featureInfos = getFeatureInfosOrThrow(featureIds); - - // split filter based on ValueType of the feature - Map tmpFilter = new HashMap<>(filters); - Map numberFilters = new HashMap<>(); - Map stringFilters = new HashMap<>(); - if (filters.containsKey("job_id")) { - stringFilters.put("job_id", tmpFilter.get("job_id")); - tmpFilter.remove("job_id"); - } - - List featureFilterInfos = getFeatureInfosOrThrow(new ArrayList<>(tmpFilter.keySet())); - Map featureInfoMap = new HashMap<>(); - for (FeatureInfo featureInfo: featureFilterInfos) { - featureInfoMap.put(featureInfo.getId(), featureInfo); - } - - - for (Map.Entry filter : tmpFilter.entrySet()) { - FeatureInfo featureInfo = featureInfoMap.get(filter.getKey()); - if (isMappableToString(featureInfo.getValueType())) { - stringFilters.put(featureInfo.getName(), filter.getValue()); - } else { - numberFilters.put(featureInfo.getName(), filter.getValue()); - } - } - - List featureNames = getFeatureNames(featureInfos); - String tableId = getBqTableId(featureInfos.get(0)); - String startDateStr = formatDateString(startDate); - String endDateStr = formatDateString(endDate); - String limitStr = (limit != 0) ? 
String.valueOf(limit) : null; - return renderTemplate(tableId, featureNames, startDateStr, endDateStr, limitStr, - numberFilters, stringFilters); - } - - private boolean isMappableToString(Enum valueType) { - return valueType.equals(Enum.STRING); - } - - private List getFeatureNames(List featureInfos) { - return featureInfos.stream().map(FeatureInfo::getName).collect(Collectors.toList()); - } - - private List getFeatureInfosOrThrow(List featureIds) { - List featureInfos = featureInfoRepository.findAllById(featureIds); - if (featureInfos.size() < featureIds.size()) { - Set foundFeatureIds = - featureInfos.stream().map(FeatureInfo::getId).collect(Collectors.toSet()); - featureIds.removeAll(foundFeatureIds); - throw new NoSuchElementException("features not found: " + featureIds); - } - return featureInfos; - } - - private String renderTemplate( - String tableId, List features, String startDateStr, String endDateStr, String limitStr, - Map numberFilters, - Map stringFilters) { - Map context = new HashMap<>(); - - context.put("table_id", tableId); - context.put("features", features); - context.put("start_date", startDateStr); - context.put("end_date", endDateStr); - context.put("limit", limitStr); - context.put("number_filters", numberFilters); - context.put("string_filters", stringFilters); - return jinjava.render(template, context); - } - - private String getBqTableId(FeatureInfo featureInfo) { - String type = storageSpec.getType(); - - if (!BigQueryStorageManager.TYPE.equals(type)) { - throw new IllegalArgumentException( - "One of the feature has warehouse storage other than bigquery"); - } - - StorageSpec storageSpec = getStorageSpec(); - Map options = storageSpec.getOptionsMap(); - String projectId = options.get("project"); - String dataset = options.get("dataset"); - String entityName = featureInfo.getFeatureSpec().getEntity().toLowerCase(); - return String.format("%s.%s.%s", projectId, dataset, entityName); - } - - private String formatDateString(Timestamp timestamp) { - Instant instant = Instant.ofEpochSecond(timestamp.getSeconds()).truncatedTo(ChronoUnit.DAYS); - return formatter.format(instant); - } -} diff --git a/core/src/main/java/feast/core/training/BigQueryTraningDatasetCreator.java b/core/src/main/java/feast/core/training/BigQueryTraningDatasetCreator.java deleted file mode 100644 index 4584aa7987b..00000000000 --- a/core/src/main/java/feast/core/training/BigQueryTraningDatasetCreator.java +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
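As a companion to the deleted BigQueryDatasetTemplater above, this is roughly the Jinjava context that renderTemplate assembled before rendering the training-dataset query. All values are hypothetical, and jinjava/templateString stand in for the instances held by the class.

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

Map<String, Object> context = new HashMap<>();
context.put("table_id", "my-gcp-project.feast_warehouse.driver"); // project.dataset.entity, per getBqTableId
context.put("features", Arrays.asList("total_trips", "avg_rating"));
context.put("start_date", "2019-01-01");                          // yyyy-MM-dd, per the formatter above
context.put("end_date", "2019-01-31");
context.put("limit", "1000");                                     // null when limit == 0
context.put("number_filters", Collections.emptyMap());
context.put("string_filters", Collections.singletonMap("job_id", "job_1234"));
String renderedQuery = jinjava.render(templateString, context);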
- * - */ -package feast.core.training; - -import com.google.cloud.bigquery.BigQuery; -import com.google.cloud.bigquery.BigQuery.JobOption; -import com.google.cloud.bigquery.BigQueryOptions; -import com.google.cloud.bigquery.JobException; -import com.google.cloud.bigquery.QueryJobConfiguration; -import com.google.cloud.bigquery.Table; -import com.google.cloud.bigquery.TableId; -import com.google.cloud.bigquery.TableInfo; -import com.google.cloud.bigquery.TableResult; -import com.google.common.base.Strings; -import com.google.protobuf.Timestamp; -import feast.core.DatasetServiceProto.DatasetInfo; -import feast.core.DatasetServiceProto.FeatureSet; -import feast.core.exception.TrainingDatasetCreationException; -import feast.core.util.UuidProvider; -import java.time.Instant; -import java.time.ZoneId; -import java.time.format.DateTimeFormatter; -import java.util.Map; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -public class BigQueryTraningDatasetCreator { - - private final BigQueryDatasetTemplater templater; - private final DateTimeFormatter formatter; - private final String projectId; - private final String datasetPrefix; - private final UuidProvider uuidProvider; - private transient BigQuery bigQuery; - - public BigQueryTraningDatasetCreator( - BigQueryDatasetTemplater templater, - String projectId, - String datasetPrefix, - UuidProvider uuidProvider) { - this( - templater, - projectId, - datasetPrefix, - uuidProvider, - BigQueryOptions.newBuilder().setProjectId(projectId).build().getService()); - } - - public BigQueryTraningDatasetCreator( - BigQueryDatasetTemplater templater, - String projectId, - String datasetPrefix, - UuidProvider uuidProvider, - BigQuery bigQuery) { - this.templater = templater; - this.formatter = DateTimeFormatter.ofPattern("yyyyMMdd").withZone(ZoneId.of("UTC")); - this.projectId = projectId; - this.datasetPrefix = datasetPrefix; - this.bigQuery = bigQuery; - this.uuidProvider = uuidProvider; - } - - /** - * Create a training dataset for a feature set for features created between startDate (inclusive) - * and endDate (inclusive) - * - * @param featureSet feature set for which the training dataset should be created - * @param startDate starting date of the training dataset (inclusive) - * @param endDate end date of the training dataset (inclusive) - * @param limit maximum number of row should be created. 
- * @param namePrefix prefix for dataset name - * @param filters additional where clause - * @return dataset info associated with the created training dataset - */ - public DatasetInfo createDataset( - FeatureSet featureSet, - Timestamp startDate, - Timestamp endDate, - long limit, - String namePrefix, - Map filters) { - try { - String query = templater.createQuery(featureSet, startDate, endDate, limit, filters); - String tableName = createBqTableName(datasetPrefix, featureSet, namePrefix); - String tableDescription = createBqTableDescription(featureSet, startDate, endDate, query); - - Map options = templater.getStorageSpec().getOptionsMap(); - String bq_dataset = options.get("dataset"); - TableId destinationTableId = TableId.of(projectId, bq_dataset, tableName); - - // Create the BigQuery table that will store the training dataset if not exists - if (bigQuery.getTable(destinationTableId) == null) { - QueryJobConfiguration queryConfig = - QueryJobConfiguration.newBuilder(query) - .setAllowLargeResults(true) - .setDestinationTable(destinationTableId) - .build(); - JobOption jobOption = JobOption.fields(); - TableResult res = bigQuery.query(queryConfig, jobOption); - if (res != null) { - Table destinationTable = bigQuery.getTable(destinationTableId); - TableInfo tableInfo = - destinationTable.toBuilder().setDescription(tableDescription).build(); - bigQuery.update(tableInfo); - } - } - - return DatasetInfo.newBuilder() - .setName(tableName) - .setTableUrl(toTableUrl(destinationTableId)) - .build(); - } catch (JobException e) { - log.error("Failed creating training dataset", e); - throw new TrainingDatasetCreationException("Failed creating training dataset", e); - } catch (InterruptedException e) { - log.error("Training dataset creation was interrupted", e); - throw new TrainingDatasetCreationException("Training dataset creation was interrupted", e); - } - } - - private String createBqTableName(String datasetPrefix, FeatureSet featureSet, String namePrefix) { - - String suffix = uuidProvider.getUuid(); - - if (!Strings.isNullOrEmpty(namePrefix)) { - // only alphanumeric and underscore are allowed - namePrefix = namePrefix.replaceAll("[^a-zA-Z0-9_]", "_"); - return String.format( - "%s_%s_%s_%s", datasetPrefix, featureSet.getEntityName(), namePrefix, suffix); - } - - return String.format("%s_%s_%s", datasetPrefix, featureSet.getEntityName(), suffix); - } - - private String createBqTableDescription( - FeatureSet featureSet, Timestamp startDate, Timestamp endDate, String query) { - return String.format( - "Feast Dataset for %s features.\nContains data from %s to %s.\n Last edited at %s.\n\n-----\n\n%s", - featureSet.getEntityName(), - formatTimestamp(startDate), - formatTimestamp(endDate), - Instant.now(), - query); - } - - private String formatTimestamp(Timestamp timestamp) { - Instant instant = Instant.ofEpochSecond(timestamp.getSeconds()); - return formatter.format(instant); - } - - private String toTableUrl(TableId tableId) { - return String.format( - "%s.%s.%s", tableId.getProject(), tableId.getDataset(), tableId.getTable()); - } -} diff --git a/core/src/main/java/feast/core/util/PackageUtil.java b/core/src/main/java/feast/core/util/PackageUtil.java new file mode 100644 index 00000000000..3ee3744d96d --- /dev/null +++ b/core/src/main/java/feast/core/util/PackageUtil.java @@ -0,0 +1,125 @@ +package feast.core.util; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.nio.file.Files; +import 
java.nio.file.Paths; +import java.util.Enumeration; +import java.util.jar.JarEntry; +import java.util.jar.JarFile; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@SuppressWarnings("WeakerAccess") +public class PackageUtil { + + // TODO: Unit tests for PackageUtil + + private static Logger LOG = LoggerFactory.getLogger(PackageUtil.class); + + /** + * Get a local file path from a URL that reference classes or a resource located in Spring Boot + * packaged jar. + * + *

The packaged jar will be extracted, if needed, in order to get a file path that directly + * points to the resource location. Note that the extraction process can take several minutes to + * complete. + * + *

One use case of this function is to detect the class path of resources to stage when + * using Dataflow runner. The resource URL however is in "jar:file:" format, which cannot be + * handled by default in Apache Beam. + * + *

+   * <pre>{@code
+   * URL url = new URL("jar:file:/tmp/springexample/target/spring-example-1.0-SNAPSHOT.jar!/BOOT-INF/lib/beam-sdks-java-core-2.16.0.jar!/");
+   * String resolvedPath = resolveSpringBootPackageClasspath(url);
+   * // resolvedPath should point to "/tmp/springexample/target/spring-example-1.0-SNAPSHOT/BOOT-INF/lib/beam-sdks-java-core-2.16.0.jar"
+   * // Note that spring-example-1.0-SNAPSHOT.jar is extracted in the process.
+   * }</pre>
+ * + * @param url Location of the resource or classes to resolve, must start with "jar:file:". + * @return Local file path that points to the resource file. + * @throws IOException If read or write error occurs during the resolve process. + */ + public static String resolveSpringBootPackageClasspath(URL url) throws IOException { + if (!url.toString().startsWith("jar:file:")) { + throw new IllegalArgumentException("URL must start with 'jar:file:'"); + } + + String path = url.toString().substring(9).replaceAll("!/", "/"); + if (path.endsWith("/")) { + path = path.substring(0, path.length() - 1); + } + + if (path.contains(".jar/BOOT-INF/")) { + String jarPath = path.substring(0, path.indexOf(".jar/BOOT-INF/") + 4); + String extractedJarPath = jarPath.substring(0, jarPath.length() - 4); + + if (Files.notExists(Paths.get(extractedJarPath))) { + LOG.info( + "Extracting '{}' to '{}' so we can get a local file path for the resource.", + jarPath, extractedJarPath); + extractJar(jarPath, extractedJarPath); + } + path = path.replace(".jar/BOOT-INF/", "/BOOT-INF/"); + } + + return path; + } + + // TODO: extractJar() currently is quite slow because it only uses a single core to extract the + // jar. Extracting a jar packaged by Spring boot, for example, can take more than 5 minutes. One + // way to speed it up is to parallelize the extraction. + + /** + * Extract contents of a jar file to an output directory. + * + *

Adapted from: https://stackoverflow.com/a/1529707/3949303 + * + * @param jarPath File path of the jar file to extract. + * @param destDirPath Destination directory to extract the jar content, will be created if not + * exists. + * @throws IOException If error occured when reading or writing files. + */ + public static void extractJar(String jarPath, String destDirPath) throws IOException { + File destDirFile = new File(destDirPath); + + if (destDirFile.exists() && !destDirFile.isDirectory()) { + throw new IOException(destDirPath + " must be a directory path"); + } + + if (!destDirFile.exists()) { + if (!destDirFile.mkdirs()) { + throw new IOException("Failed to create directory: " + destDirPath); + } + } + + JarFile jar = new JarFile(jarPath); + Enumeration enumEntries = jar.entries(); + + while (enumEntries.hasMoreElements()) { + JarEntry jarEntry = (java.util.jar.JarEntry) enumEntries.nextElement(); + File outFile = new java.io.File(destDirPath + File.separator + jarEntry.getName()); + + if (jarEntry.isDirectory()) { + if (!outFile.mkdir()) { + throw new IOException("Failed to created directory: " + outFile); + } + continue; + } + + InputStream is = jar.getInputStream(jarEntry); + FileOutputStream fos = new FileOutputStream(outFile); + while (is.available() > 0) { + fos.write(is.read()); + } + fos.close(); + is.close(); + } + + jar.close(); + } +} diff --git a/core/src/main/java/feast/core/util/PathUtil.java b/core/src/main/java/feast/core/util/PathUtil.java deleted file mode 100644 index 53e74526f60..00000000000 --- a/core/src/main/java/feast/core/util/PathUtil.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.util; - -import java.net.URI; -import java.net.URISyntaxException; -import java.nio.file.Path; -import java.nio.file.Paths; - -public class PathUtil { - - /** - * Gets a path with a schema if present - */ - public static Path getPath(String value) { - if (value.contains("://")) { - try { - return Paths.get(new URI(value)); - } catch (URISyntaxException e) { - throw new IllegalArgumentException(e); - } - } else { - return Paths.get(value); - } - } -} diff --git a/core/src/main/java/feast/core/util/PipelineUtil.java b/core/src/main/java/feast/core/util/PipelineUtil.java new file mode 100644 index 00000000000..9d08aee12a0 --- /dev/null +++ b/core/src/main/java/feast/core/util/PipelineUtil.java @@ -0,0 +1,56 @@ +package feast.core.util; + +import static feast.core.util.PackageUtil.resolveSpringBootPackageClasspath; + +import java.io.File; +import java.io.IOException; +import java.net.URISyntaxException; +import java.net.URL; +import java.net.URLClassLoader; +import java.util.ArrayList; +import java.util.List; + +public class PipelineUtil { + + /** + * Attempts to detect all the resources the class loader has access to. This does not recurse to + * class loader parents stopping it from pulling in resources from the system class loader. + *

+ * This method extends this implemention https://github.com/apache/beam/blob/01726e9c62313749f9ea7c93063a1178abd1a8db/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PipelineResources.java#L51 + * to support URL that starts with "jar:file:", usually coming from a packaged Spring Boot jar. + * + * @param classLoader The URLClassLoader to use to detect resources to stage. + * @return A list of absolute paths to the resources the class loader uses. + * @throws IllegalArgumentException If either the class loader is not a URLClassLoader or one of + * the resources the class loader exposes is not a file + * resource. + * @throws IOException If there is an error in reading or writing files. + */ + public static List detectClassPathResourcesToStage(ClassLoader classLoader) + throws IOException { + if (!(classLoader instanceof URLClassLoader)) { + String message = + String.format( + "Unable to use ClassLoader to detect classpath elements. " + + "Current ClassLoader is %s, only URLClassLoaders are supported.", + classLoader); + throw new IllegalArgumentException(message); + } + + List files = new ArrayList<>(); + for (URL url : ((URLClassLoader) classLoader).getURLs()) { + if (url.toString().startsWith("jar:file:")) { + files.add(resolveSpringBootPackageClasspath(url)); + continue; + } + + try { + files.add(new File(url.toURI()).getAbsolutePath()); + } catch (IllegalArgumentException | URISyntaxException e) { + String message = String.format("Unable to convert url (%s) to file.", url); + throw new IllegalArgumentException(message, e); + } + } + return files; + } +} diff --git a/core/src/main/java/feast/core/util/RandomUuidProvider.java b/core/src/main/java/feast/core/util/RandomUuidProvider.java deleted file mode 100644 index 67e155d5775..00000000000 --- a/core/src/main/java/feast/core/util/RandomUuidProvider.java +++ /dev/null @@ -1,10 +0,0 @@ -package feast.core.util; - -import java.util.UUID; - -public class RandomUuidProvider implements UuidProvider { - @Override - public String getUuid() { - return UUID.randomUUID().toString().replace("-",""); - } -} diff --git a/core/src/main/java/feast/core/util/TypeConversion.java b/core/src/main/java/feast/core/util/TypeConversion.java index 11ef23d3b0e..0c047cd882f 100644 --- a/core/src/main/java/feast/core/util/TypeConversion.java +++ b/core/src/main/java/feast/core/util/TypeConversion.java @@ -17,11 +17,13 @@ package feast.core.util; +import com.google.common.base.Strings; import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; import java.lang.reflect.Type; import java.util.*; +import java.util.Map.Entry; public class TypeConversion { private static Gson gson = new Gson(); @@ -76,4 +78,18 @@ public static String convertMapToJsonString(Map map) { } return gson.toJson(map); } + + /** + * Convert a map of key value pairs to a array of java arguments in format --key=value + * + * @param map + * @return array of string arguments + */ + public static String[] convertMapToArgs(Map map) { + List args = new ArrayList<>(); + for (Entry arg : map.entrySet()) { + args.add(Strings.lenientFormat("--%s=%s", arg.getKey(), arg.getValue())); + } + return args.toArray(new String[]{}); + } } diff --git a/core/src/main/java/feast/core/util/UuidProvider.java b/core/src/main/java/feast/core/util/UuidProvider.java deleted file mode 100644 index c537560d1c6..00000000000 --- a/core/src/main/java/feast/core/util/UuidProvider.java +++ /dev/null @@ -1,5 +0,0 @@ -package feast.core.util; - -public interface UuidProvider { - 
String getUuid(); -} diff --git a/core/src/main/java/feast/core/validators/FeatureSetValidator.java b/core/src/main/java/feast/core/validators/FeatureSetValidator.java new file mode 100644 index 00000000000..3aa365eebf8 --- /dev/null +++ b/core/src/main/java/feast/core/validators/FeatureSetValidator.java @@ -0,0 +1,34 @@ +package feast.core.validators; + +import static feast.core.validators.Matchers.checkValidCharacters; + +import com.google.common.collect.Sets; +import feast.core.FeatureSetProto.EntitySpec; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.FeatureSetProto.FeatureSpec; +import java.util.HashSet; +import java.util.List; +import java.util.stream.Collectors; + +public class FeatureSetValidator { + public static void validateSpec(FeatureSetSpec featureSetSpec) { + checkValidCharacters(featureSetSpec.getName(), "name"); + checkUniqueColumns(featureSetSpec.getEntitiesList(), featureSetSpec.getFeaturesList()); + for (EntitySpec entitySpec : featureSetSpec.getEntitiesList()) { + checkValidCharacters(entitySpec.getName(), "entities::name"); + } + for (FeatureSpec featureSpec : featureSetSpec.getFeaturesList()) { + checkValidCharacters(featureSpec.getName(), "features::name"); + } + } + + private static void checkUniqueColumns(List entitySpecs, List featureSpecs) { + List names = entitySpecs.stream().map(EntitySpec::getName).collect(Collectors.toList()); + featureSpecs.stream().map(f -> names.add(f.getName())); + HashSet nameSet = Sets.newHashSet(names); + if (nameSet.size() != names.size()) { + throw new IllegalArgumentException(String.format( + "fields within a featureset must be unique.")); + } + } +} diff --git a/core/src/main/java/feast/core/validators/Matchers.java b/core/src/main/java/feast/core/validators/Matchers.java index 84fbd644954..f67a5bb3808 100644 --- a/core/src/main/java/feast/core/validators/Matchers.java +++ b/core/src/main/java/feast/core/validators/Matchers.java @@ -17,35 +17,58 @@ package feast.core.validators; -import com.google.common.base.Strings; - import java.util.regex.Pattern; public class Matchers { + private static Pattern UPPER_SNAKE_CASE_REGEX = Pattern.compile("^[A-Z0-9]+(_[A-Z0-9]+)*$"); private static Pattern LOWER_SNAKE_CASE_REGEX = Pattern.compile("^[a-z0-9]+(_[a-z0-9]+)*$"); + private static Pattern VALID_CHARACTERS_REGEX = Pattern.compile("^[a-zA-Z0-9\\-_]*$"); + private static Pattern VALID_CHARACTERS_FSET_FILTER_REGEX = Pattern.compile("^[a-zA-Z0-9\\-_*]*$"); private static String ERROR_MESSAGE_TEMPLATE = "invalid value for field %s: %s"; public static void checkUpperSnakeCase(String input, String fieldName) - throws IllegalArgumentException { + throws IllegalArgumentException { if (!UPPER_SNAKE_CASE_REGEX.matcher(input).matches()) { throw new IllegalArgumentException( - Strings.lenientFormat( - ERROR_MESSAGE_TEMPLATE, - fieldName, - "argument must be in upper snake case, and cannot include any special characters.")); + String.format( + ERROR_MESSAGE_TEMPLATE, + fieldName, + "argument must be in upper snake case, and cannot include any special characters.")); } } public static void checkLowerSnakeCase(String input, String fieldName) - throws IllegalArgumentException { + throws IllegalArgumentException { if (!LOWER_SNAKE_CASE_REGEX.matcher(input).matches()) { throw new IllegalArgumentException( - Strings.lenientFormat( - ERROR_MESSAGE_TEMPLATE, - fieldName, - "argument must be in lower snake case, and cannot include any special characters.")); + String.format( + ERROR_MESSAGE_TEMPLATE, + fieldName, + "argument must be in 
lower snake case, and cannot include any special characters.")); + } + } + + public static void checkValidCharacters(String input, String fieldName) + throws IllegalArgumentException { + if (!VALID_CHARACTERS_REGEX.matcher(input).matches()) { + throw new IllegalArgumentException( + String.format( + ERROR_MESSAGE_TEMPLATE, + fieldName, + "argument must only contain alphanumeric characters, dashes and underscores.")); + } + } + + public static void checkValidFeatureSetFilterName(String input, String fieldName) + throws IllegalArgumentException { + if (!VALID_CHARACTERS_FSET_FILTER_REGEX.matcher(input).matches()) { + throw new IllegalArgumentException( + String.format( + ERROR_MESSAGE_TEMPLATE, + fieldName, + "argument must only contain alphanumeric characters, dashes, underscores, or an asterisk.")); } } } diff --git a/core/src/main/java/feast/core/validators/SpecValidator.java b/core/src/main/java/feast/core/validators/SpecValidator.java deleted file mode 100644 index 5ceafdd585f..00000000000 --- a/core/src/main/java/feast/core/validators/SpecValidator.java +++ /dev/null @@ -1,318 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.validators; - -import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkNotNull; -import static feast.core.validators.Matchers.checkLowerSnakeCase; - -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import com.google.common.collect.Lists; -import com.google.common.collect.Streams; -import feast.core.dao.EntityInfoRepository; -import feast.core.dao.FeatureGroupInfoRepository; -import feast.core.dao.FeatureInfoRepository; -import feast.core.storage.BigQueryStorageManager; -import feast.core.storage.BigTableStorageManager; -import feast.core.storage.JsonFileStorageManager; -import feast.core.storage.PostgresStorageManager; -import feast.core.storage.RedisStorageManager; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureGroupSpecProto.FeatureGroupSpec; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.ImportSpecProto.Field; -import feast.specs.ImportSpecProto.ImportSpec; -import feast.specs.StorageSpecProto.StorageSpec; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.stream.Collectors; -import org.springframework.beans.factory.annotation.Autowired; - -public class SpecValidator { - - private static final String NO_STORE = ""; - private static final String[] SUPPORTED_WAREHOUSE_STORES = - new String[]{ - BigQueryStorageManager.TYPE, JsonFileStorageManager.TYPE - }; - private static final String[] SUPPORTED_SERVING_STORES = - new String[]{ - BigTableStorageManager.TYPE, PostgresStorageManager.TYPE, RedisStorageManager.TYPE, - }; - private static final String[] SUPPORTED_ERRORS_STORES = new String[]{"file.json", "stderr", - "stdout"}; - - private EntityInfoRepository 
entityInfoRepository; - private FeatureGroupInfoRepository featureGroupInfoRepository; - private FeatureInfoRepository featureInfoRepository; - - @Autowired - public SpecValidator( - EntityInfoRepository entityInfoRepository, - FeatureGroupInfoRepository featureGroupInfoRepository, - FeatureInfoRepository featureInfoRepository) { - - this.entityInfoRepository = entityInfoRepository; - this.featureGroupInfoRepository = featureGroupInfoRepository; - this.featureInfoRepository = featureInfoRepository; - } - - /** - * Validates a given feature spec's contents, throwing and IllegalArgumentException if the spec is - * invalid. - */ - public void validateFeatureSpec(FeatureSpec spec) throws IllegalArgumentException { - try { - // check not not null - checkArgument(!spec.getId().equals(""), "Id field cannot be empty"); - checkArgument(!spec.getName().equals(""), "Name field cannot be empty"); - checkLowerSnakeCase(spec.getName(), "Name"); - checkArgument(!spec.getOwner().equals(""), "Owner field cannot be empty"); - checkArgument(!spec.getDescription().equals(""), "Description field cannot be empty"); - checkArgument(!spec.getEntity().equals(""), "Entity field cannot be empty"); - - // check id validity - String[] idSplit = spec.getId().split("\\."); - checkArgument(idSplit.length == 2, "Id must contain entity, name"); - checkArgument( - idSplit[0].equals(spec.getEntity()), - "Id must be in format entity.name, entity in Id does not match entity provided."); - checkArgument( - idSplit[1].equals(spec.getName()), - "Id must be in format entity.name, name in Id does not match name provided."); - - // check if referenced objects exist - checkArgument( - entityInfoRepository.existsById(spec.getEntity()), - Strings.lenientFormat("Entity with name %s does not exist", spec.getEntity())); - - if (!spec.getGroup().equals("")) { - Optional groupOptional = featureGroupInfoRepository.findById(spec.getGroup()); - if (!groupOptional.isPresent()) { - throw new IllegalArgumentException( - Strings.lenientFormat("Group with id %s does not exist", spec.getGroup())); - } - } - - } catch (NullPointerException | IllegalArgumentException e) { - throw new IllegalArgumentException( - Strings.lenientFormat( - "Validation for feature spec with id %s failed: %s", spec.getId(), e.getMessage())); - } - } - - public void validateFeatureGroupSpec(FeatureGroupSpec spec) throws IllegalArgumentException { - try { - checkArgument(!spec.getId().equals(""), "Id field cannot be empty"); - checkLowerSnakeCase(spec.getId(), "Id"); - } catch (NullPointerException | IllegalArgumentException e) { - throw new IllegalArgumentException( - Strings.lenientFormat( - "Validation for feature group spec with id %s failed: %s", - spec.getId(), e.getMessage())); - } - } - - public void validateEntitySpec(EntitySpec spec) throws IllegalArgumentException { - try { - checkArgument(!spec.getName().equals(""), "Name field cannot be empty"); - checkLowerSnakeCase(spec.getName(), "Name"); - checkNotNull(spec.getDescription(), "Description field cannot be empty"); - } catch (NullPointerException | IllegalArgumentException e) { - throw new IllegalArgumentException( - Strings.lenientFormat( - "Validation for entity spec with name %s failed: %s", - spec.getName(), e.getMessage())); - } - } - - public void validateStorageSpec(StorageSpec spec) throws IllegalArgumentException { - try { - checkArgument(!spec.getId().equals(""), "Id field cannot be empty"); - Matchers.checkUpperSnakeCase(spec.getId(), "Id"); - checkArgument(Streams.concat( - 
Arrays.stream(SUPPORTED_SERVING_STORES), - Arrays.stream(SUPPORTED_WAREHOUSE_STORES)).collect(Collectors.toList()) - .contains(spec.getType()), - "Store type not supported " + spec.getType()); - } catch (NullPointerException | IllegalArgumentException e) { - throw new IllegalArgumentException( - Strings.lenientFormat( - "Validation for storage spec with id %s failed: %s", spec.getId(), e.getMessage()), - e); - } - } - - // TODO: add validation for storage types and options - public void validateServingStorageSpec(StorageSpec spec) throws IllegalArgumentException { - try { - checkArgument(!spec.getId().equals(""), "Id field cannot be empty"); - Matchers.checkUpperSnakeCase(spec.getId(), "Id"); - checkArgument(Arrays.asList(SUPPORTED_SERVING_STORES).contains(spec.getType()), - "Serving store type not supported " + spec.getType()); - } catch (NullPointerException | IllegalArgumentException e) { - throw new IllegalArgumentException( - Strings.lenientFormat( - "Validation for storage spec with id %s failed: %s", spec.getId(), e.getMessage()), - e); - } - } - - public void validateWarehouseStorageSpec(StorageSpec spec) throws IllegalArgumentException { - try { - checkArgument(!spec.getId().equals(""), "Id field cannot be empty"); - Matchers.checkUpperSnakeCase(spec.getId(), "Id"); - checkArgument(Arrays.asList(SUPPORTED_WAREHOUSE_STORES).contains(spec.getType()), - "Warehouse store type not supported " + spec.getType()); - } catch (NullPointerException | IllegalArgumentException e) { - throw new IllegalArgumentException( - Strings.lenientFormat( - "Validation for storage spec with id %s failed: %s", spec.getId(), e.getMessage()), - e); - } - } - - public void validateErrorsStorageSpec(StorageSpec spec) throws IllegalArgumentException { - try { - checkArgument(!spec.getId().equals(""), "Id field cannot be empty"); - Matchers.checkUpperSnakeCase(spec.getId(), "Id"); - checkArgument(Arrays.asList(SUPPORTED_ERRORS_STORES).contains(spec.getType()), - "Errors store type not supported " + spec.getType()); - } catch (NullPointerException | IllegalArgumentException e) { - throw new IllegalArgumentException( - Strings.lenientFormat( - "Validation for storage spec with id %s failed: %s", spec.getId(), e.getMessage()), - e); - } - } - - public void validateImportSpec(ImportSpec spec) throws IllegalArgumentException { - try { - switch (spec.getType()) { - case "kafka": - checkKafkaImportSpecOption(spec); - break; - case "pubsub": - checkPubSubImportSpecOption(spec); - break; - case "file.csv": - case "file.json": - checkFileImportSpecOption(spec); - checkArgument( - !spec.getSchema().getEntityIdColumn().equals(""), - "entityId column must be specified in schema"); - break; - case "bigquery": - checkBigqueryImportSpecOption(spec); - checkArgument( - !spec.getSchema().getEntityIdColumn().equals(""), - "entityId column must be specified in schema"); - break; - default: - throw new IllegalArgumentException( - Strings.lenientFormat("Type %s not supported", spec.getType())); - } - spec.getSchema() - .getFieldsList() - .stream() - .map(Field::getFeatureId) - .filter(featureId -> !featureId.equals("")) - .forEach( - id -> - checkArgument( - featureInfoRepository.existsById(id), - Strings.lenientFormat("Feature %s not registered", id))); - for (String name : spec.getEntitiesList()) { - checkArgument( - entityInfoRepository.existsById(name), - Strings.lenientFormat("Entity %s not registered", name)); - } - Map jobOptions = spec.getJobOptionsMap(); - if (jobOptions.size() > 0) { - List opts = Lists.newArrayList( - 
"sample.limit", - "coalesceRows.enabled", - "coalesceRows.delaySeconds", - "coalesceRows.timeoutSeconds" - ); - for (String key : jobOptions.keySet()) { - Preconditions.checkArgument(opts.contains(key), - Strings.lenientFormat("Option %s is not a valid jobOption", key)); - } - } - } catch (NullPointerException | IllegalArgumentException e) { - throw new IllegalArgumentException( - Strings.lenientFormat("Validation for import spec failed: %s", e.getMessage())); - } - } - - private void checkKafkaImportSpecOption(ImportSpec spec) { - try { - String topics = spec.getSourceOptionsOrDefault("topics", ""); - String server = spec.getSourceOptionsOrDefault("server", ""); - if (topics.equals("") && server.equals("")) { - throw new IllegalArgumentException( - "Kafka ingestion requires either topics or servers"); - } - } catch (NullPointerException | IllegalArgumentException e) { - throw new IllegalArgumentException( - Strings.lenientFormat("Invalid options: %s", e.getMessage())); - } - } - - private void checkFileImportSpecOption(ImportSpec spec) throws IllegalArgumentException { - try { - checkArgument(!spec.getSourceOptionsOrDefault("path", "").equals(""), - "File path cannot be empty"); - } catch (NullPointerException | IllegalArgumentException e) { - throw new IllegalArgumentException( - Strings.lenientFormat("Invalid options: %s", e.getMessage())); - } - } - - private void checkPubSubImportSpecOption(ImportSpec spec) throws IllegalArgumentException { - try { - String topic = spec.getSourceOptionsOrDefault("topic", ""); - String subscription = spec.getSourceOptionsOrDefault("subscription", ""); - if (topic.equals("") && subscription.equals("")) { - throw new IllegalArgumentException( - "Pubsub ingestion requires either topic or subscription"); - } - } catch (NullPointerException | IllegalArgumentException e) { - throw new IllegalArgumentException( - Strings.lenientFormat("Invalid options: %s", e.getMessage())); - } - } - - private void checkBigqueryImportSpecOption(ImportSpec spec) throws IllegalArgumentException { - try { - checkArgument(!spec.getSourceOptionsOrThrow("project").equals(""), - "Bigquery project cannot be empty"); - checkArgument(!spec.getSourceOptionsOrThrow("dataset").equals(""), - "Bigquery dataset cannot be empty"); - checkArgument(!spec.getSourceOptionsOrThrow("table").equals(""), - "Bigquery table cannot be empty"); - } catch (NullPointerException | IllegalArgumentException e) { - throw new IllegalArgumentException( - Strings.lenientFormat("Invalid options: %s", e.getMessage())); - } - } -} \ No newline at end of file diff --git a/core/src/main/resources/application.properties b/core/src/main/resources/application.properties deleted file mode 100644 index 58d8af33b3f..00000000000 --- a/core/src/main/resources/application.properties +++ /dev/null @@ -1,57 +0,0 @@ -# -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# -grpc.port=${GRPC_PORT:6565} - -feast.core.projectId = ${PROJECT_ID:} -feast.core.datasetPrefix = ${DATASET_PREFIX:fs} - -feast.jobs.runner=${JOB_RUNNER:DirectRunner} -feast.jobs.workspace=${JOB_WORKSPACE} -feast.jobs.options=${JOB_OPTIONS:{}} -feast.jobs.executable=${JOB_EXECUTABLE:feast-ingestion.jar} - -feast.jobs.dataflow.projectId = ${DATAFLOW_PROJECT_ID:} -feast.jobs.dataflow.location = ${DATAFLOW_LOCATION:} - -feast.jobs.flink.configDir = ${FLINK_CONF_DIR:/etc/flink/flink-1.5.5/conf} -feast.jobs.flink.masterUrl = ${FLINK_MASTER_URL:localhost:8081} - -feast.jobs.monitor.period = ${JOB_MONITOR_PERIOD_MS:5000} -feast.jobs.monitor.initialDelay = ${JOB_MONITOR_INITIAL_DELAY_MS:60000} - -feast.store.serving.type = ${STORE_SERVING_TYPE:} -feast.store.serving.options = ${STORE_SERVING_OPTIONS:{}} -feast.store.warehouse.type = ${STORE_WAREHOUSE_TYPE:} -feast.store.warehouse.options = ${STORE_WAREHOUSE_OPTIONS:{}} -feast.store.errors.type = ${STORE_ERRORS_TYPE:} -feast.store.errors.options = ${STORE_ERRORS_OPTIONS:{}} - - -spring.jpa.properties.hibernate.format_sql=true -spring.datasource.url=jdbc:postgresql://${DB_HOST:127.0.0.1}:${DB_PORT:5432}/postgres -spring.datasource.username=${DB_USERNAME:postgres} -spring.datasource.password=${DB_PASSWORD:password} -spring.jpa.hibernate.naming.physical-strategy=org.hibernate.boot.model.naming.PhysicalNamingStrategyStandardImpl -spring.jpa.hibernate.ddl-auto=update - -statsd.host= ${STATSD_HOST:localhost} -statsd.port= ${STATSD_PORT:8125} - -management.metrics.export.simple.enabled=false -management.metrics.export.statsd.enabled=true -management.metrics.export.statsd.host=${STATSD_HOST:localhost} -management.metrics.export.statsd.port=${STATSD_PORT:8125} diff --git a/core/src/main/resources/application.yml b/core/src/main/resources/application.yml new file mode 100644 index 00000000000..be4f8c6b72a --- /dev/null +++ b/core/src/main/resources/application.yml @@ -0,0 +1,74 @@ +# +# Copyright 2018 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# + +grpc: + # The port number Feast Serving GRPC service should listen on + port: 6565 + # This allows client to discover GRPC endpoints easily + # https://github.com/grpc/grpc-java/blob/master/documentation/server-reflection-tutorial.md + enable-reflection: true + +feast: +# version: @project.version@ + jobs: + # Runner type for feature population jobs. Currently supported runner types are + # DirectRunner and DataflowRunner. + runner: DirectRunner + # Key-value dict of job options to be passed to the population jobs. + options: {} + metrics: + # Enable metrics pushing for all ingestion jobs. + enabled: false + # Type of metrics sink. Only prometheus is currently supported. + type: prometheus + # Host of the metrics sink. In the case of prometheus, this is the host of the prometheus + # pushGateway to sink metrics to. + host: localhost + # Port of the metrics sink. In the case of prometheus, this is the port of the prometheus + # pushGateway to sink metrics to. 
+ port: 9091 + + stream: + # Feature stream type. Only kafka is supported. + type: kafka + # Feature stream options. + options: + topic: feast-features + bootstrapServers: kafka:9092 + replicationFactor: 1 + partitions: 1 + +spring: + jpa: + properties.hibernate.format_sql: true + hibernate.naming.physical-strategy=org.hibernate.boot.model.naming: PhysicalNamingStrategyStandardImpl + hibernate.ddl-auto: update + datasource: + driverClassName: org.postgresql.Driver + url: jdbc:postgresql://${DB_HOST:127.0.0.1}:${DB_PORT:5432}/${DB_DATABASE:postgres} + username: ${DB_USERNAME:postgres} + password: ${DB_PASSWORD:password} + +management: + metrics: + export: + simple: + enabled: false + statsd: + enabled: true + host: ${STATSD_HOST:localhost} + port: ${STATSD_PORT:8125} diff --git a/core/src/main/resources/banner.txt b/core/src/main/resources/banner.txt new file mode 100644 index 00000000000..d0f1c033ebe --- /dev/null +++ b/core/src/main/resources/banner.txt @@ -0,0 +1,14 @@ + +███████╗███████╗ █████╗ ███████╗████████╗ +██╔════╝██╔════╝██╔══██╗██╔════╝╚══██╔══╝ +█████╗ █████╗ ███████║███████╗ ██║ +██╔══╝ ██╔══╝ ██╔══██║╚════██║ ██║ +██║ ███████╗██║ ██║███████║ ██║ +╚═╝ ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═╝ + + ██████╗ ██████╗ ██████╗ ███████╗ +██╔════╝██╔═══██╗██╔══██╗██╔════╝ +██║ ██║ ██║██████╔╝█████╗ +██║ ██║ ██║██╔══██╗██╔══╝ +╚██████╗╚██████╔╝██║ ██║███████╗ + ╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚══════╝ diff --git a/core/src/test/java/feast/core/CoreApplicationTest.java b/core/src/test/java/feast/core/CoreApplicationTest.java index 01f92fcd006..bf2872c73a8 100644 --- a/core/src/test/java/feast/core/CoreApplicationTest.java +++ b/core/src/test/java/feast/core/CoreApplicationTest.java @@ -1,152 +1,128 @@ package feast.core; - -import static feast.core.config.StorageConfig.DEFAULT_ERRORS_ID; -import static feast.core.config.StorageConfig.DEFAULT_SERVING_ID; -import static feast.core.config.StorageConfig.DEFAULT_WAREHOUSE_ID; -import static org.junit.Assert.assertEquals; -import static org.mockito.ArgumentMatchers.any; - -import com.google.protobuf.Timestamp; -import feast.core.JobServiceProto.JobServiceTypes.SubmitImportJobRequest; -import feast.core.JobServiceProto.JobServiceTypes.SubmitImportJobResponse; -import feast.core.config.ImportJobDefaults; -import feast.core.job.JobManager; -import feast.core.model.StorageInfo; -import feast.core.service.SpecService; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.ImportJobSpecsProto.ImportJobSpecs; -import feast.specs.ImportSpecProto.Field; -import feast.specs.ImportSpecProto.ImportSpec; -import feast.specs.ImportSpecProto.Schema; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.types.ValueProto.ValueType; -import io.grpc.ManagedChannel; -import io.grpc.ManagedChannelBuilder; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mockito; -import org.mockito.stubbing.Answer; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.boot.test.context.TestConfiguration; -import org.springframework.context.annotation.Bean; -import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.junit4.SpringRunner; - -/** - * Starts the application 
context with some properties - */ -@RunWith(SpringRunner.class) -@SpringBootTest(properties = { - "feast.jobs.workspace=${java.io.tmpdir}/${random.uuid}", - "spring.datasource.url=jdbc:h2:mem:testdb", - "feast.store.warehouse.type=file.json", - "feast.store.warehouse.options={\"path\":\"/tmp/foobar\"}", - "feast.store.serving.type=redis", - "feast.store.serving.options={\"host\":\"localhost\",\"port\":1234}", - "feast.store.errors.type=stderr" -}) -@DirtiesContext +// +//import static feast.core.config.StorageConfig.DEFAULT_SERVING_ID; +//import static feast.core.config.StorageConfig.DEFAULT_WAREHOUSE_ID; +//import static org.junit.Assert.assertEquals; +//import static org.mockito.ArgumentMatchers.any; +//import static org.mockito.Mockito.when; +// +//import feast.core.config.ImportJobDefaults; +//import feast.core.job.JobManager; +//import feast.core.model.StorageInfo; +//import feast.core.service.SpecService; +//import feast.core.stream.FeatureStream; +//import feast.specs.EntitySpecProto.EntitySpec; +//import feast.specs.FeatureSpecProto.FeatureSpec; +//import feast.specs.StorageSpecProto.StorageSpec; +//import feast.types.ValueProto.ValueType; +//import io.grpc.ManagedChannel; +//import io.grpc.ManagedChannelBuilder; +//import java.io.IOException; +//import java.nio.file.Files; +//import java.nio.file.Paths; +//import java.util.Collections; +//import java.util.HashMap; +//import java.util.List; +//import java.util.Map; +//import org.junit.Test; +//import org.junit.runner.RunWith; +//import org.mockito.ArgumentMatchers; +//import org.mockito.Mockito; +//import org.mockito.stubbing.Answer; +//import org.springframework.beans.factory.annotation.Autowired; +//import org.springframework.boot.test.context.SpringBootTest; +//import org.springframework.boot.test.context.TestConfiguration; +//import org.springframework.context.annotation.Bean; +//import org.springframework.test.annotation.DirtiesContext; +//import org.springframework.test.context.junit4.SpringRunner; +// +///** +// * Starts the application context with some properties +// */ +//@RunWith(SpringRunner.class) +//@SpringBootTest(properties = { +// "feast.jobs.workspace=${java.io.tmpdir}/${random.uuid}", +// "spring.datasource.url=jdbc:h2:mem:testdb", +// "feast.store.warehouse.type=FILE.JSON", +// "feast.store.warehouse.options={\"path\":\"/tmp/foobar\"}", +// "feast.store.serving.type=REDIS", +// "feast.store.serving.options={\"host\":\"localhost\",\"port\":1234}", +// "feast.store.errors.type=STDERR", +// "feast.stream.type=kafka", +// "feast.stream.options={\"bootstrapServers\":\"localhost:8081\"}" +//}) +//@DirtiesContext public class CoreApplicationTest { - - @Autowired - SpecService specService; - @Autowired - ImportJobDefaults jobDefaults; - @Autowired - JobManager jobManager; - - @Test - public void test_withProperties_systemServingAndWarehouseStoresRegistered() throws IOException { - Files.createDirectory(Paths.get(jobDefaults.getWorkspace())); - - List warehouseStorageInfo = specService - .getStorage(Collections.singletonList(DEFAULT_WAREHOUSE_ID)); - assertEquals(warehouseStorageInfo.size(), 1); - assertEquals(warehouseStorageInfo.get(0).getStorageSpec(), StorageSpec.newBuilder() - .setId(DEFAULT_WAREHOUSE_ID).setType("file.json").putOptions("path", "/tmp/foobar") - .build()); - - List servingStorageInfo = specService - .getStorage(Collections.singletonList(DEFAULT_SERVING_ID)); - assertEquals(servingStorageInfo.size(), 1); - assertEquals(servingStorageInfo.get(0).getStorageSpec(), StorageSpec.newBuilder() - 
.setId(DEFAULT_SERVING_ID).setType("redis") - .putOptions("host", "localhost") - .putOptions("port", "1234") - .build()); - - ManagedChannelBuilder channelBuilder = ManagedChannelBuilder.forAddress("localhost", 6565); - ManagedChannel channel = channelBuilder.usePlaintext(true).build(); - CoreServiceGrpc.CoreServiceBlockingStub coreService = CoreServiceGrpc.newBlockingStub(channel); - JobServiceGrpc.JobServiceBlockingStub jobService = JobServiceGrpc.newBlockingStub(channel); - - EntitySpec entitySpec = EntitySpec.newBuilder().setName("test").build(); - FeatureSpec featureSpec = FeatureSpec.newBuilder() - .setId("test.int64") - .setName("int64") - .setEntity("test") - .setValueType(ValueType.Enum.INT64) - .setOwner("hermione@example.com") - .setDescription("Test is a test") - .setUri("http://example.com/test.int64").build(); - ImportSpec importSpec = ImportSpec.newBuilder() - .setSchema(Schema.newBuilder() - .setEntityIdColumn("id") - .setTimestampValue(Timestamp.getDefaultInstance()) - .addFields(Field.newBuilder().setName("id")) - .addFields(Field.newBuilder().setName("a").setFeatureId("test.int64"))) - .addEntities("test") - .setType("file.csv") - .putSourceOptions("path", "/tmp/foobar").build(); - - coreService.applyEntity(entitySpec); - coreService.applyFeature(featureSpec); - SubmitImportJobRequest jobSubmitReq = SubmitImportJobRequest.newBuilder() - .setImportSpec(importSpec).build(); - - Map args = new HashMap<>(); - Mockito.when(jobManager.submitJob(any(), any())).thenAnswer((Answer) invocation -> { - args.put(0, invocation.getArgument(0)); - args.put(1, invocation.getArgument(1)); - return "externalJobId1234"; - }); - SubmitImportJobResponse jobSubmitRes = jobService.submitJob(jobSubmitReq); - String jobId = jobSubmitRes.getJobId(); - assertEquals(args.get(1), Paths.get(jobDefaults.getWorkspace()).resolve(jobId)); - assertEquals(ImportJobSpecs.newBuilder() - .setJobId(jobId) - .setImportSpec(importSpec) - .setErrorsStorageSpec(StorageSpec.newBuilder() - .setId(DEFAULT_ERRORS_ID) - .setType("stderr")) - .addEntitySpecs(entitySpec) - .addFeatureSpecs(featureSpec) - .setServingStorageSpec(StorageSpec.newBuilder() - .setId(DEFAULT_SERVING_ID) - .setType("redis") - .putOptions("host", "localhost").putOptions("port", "1234")) - .setWarehouseStorageSpec(StorageSpec.newBuilder() - .setId(DEFAULT_WAREHOUSE_ID) - .setType("file.json") - .putOptions("path", "/tmp/foobar")) - .build(), args.get(0)); - } - - @TestConfiguration - public static class MockProvider { - - @Bean - public JobManager jobManager() { - return Mockito.mock(JobManager.class); - } - } +// +// @Autowired +// SpecService specService; +// @Autowired +// ImportJobDefaults jobDefaults; +// @Autowired +// JobManager jobManager; +// @Autowired +// FeatureStream featureStream; +// +// @Test +// public void test_withProperties_systemServingAndWarehouseStoresRegistered() throws IOException { +// Files.createDirectory(Paths.get(jobDefaults.getWorkspace())); +// +// List warehouseStorageInfo = specService +// .getStorage(Collections.singletonList(DEFAULT_WAREHOUSE_ID)); +// assertEquals(warehouseStorageInfo.size(), 1); +// assertEquals(warehouseStorageInfo.get(0).getStorageSpec(), StorageSpec.newBuilder() +// .setId(DEFAULT_WAREHOUSE_ID).setType("FILE.JSON").putOptions("path", "/tmp/foobar") +// .build()); +// +// List servingStorageInfo = specService +// .getStorage(Collections.singletonList(DEFAULT_SERVING_ID)); +// assertEquals(servingStorageInfo.size(), 1); +// assertEquals(servingStorageInfo.get(0).getStorageSpec(), 
StorageSpec.newBuilder() +// .setId(DEFAULT_SERVING_ID).setType("REDIS") +// .putOptions("host", "localhost") +// .putOptions("port", "1234") +// .build()); +// +// ManagedChannelBuilder channelBuilder = ManagedChannelBuilder.forAddress("localhost", 6565); +// ManagedChannel channel = channelBuilder.usePlaintext(true).build(); +// CoreServiceGrpc.CoreServiceBlockingStub coreService = CoreServiceGrpc.newBlockingStub(channel); +// +// EntitySpec entitySpec = EntitySpec.newBuilder().setName("test").build(); +// FeatureSpec featureSpec = FeatureSpec.newBuilder() +// .setId("test.int64") +// .setName("int64") +// .setEntity("test") +// .setValueType(ValueType.Enum.INT64) +// .setOwner("hermione@example.com") +// .setDescription("Test is a test") +// .setUri("http://example.com/test.int64").build(); +// +// when(featureStream.generateTopicName(ArgumentMatchers.anyString())).thenReturn("my-topic"); +// when(featureStream.getType()).thenReturn("kafka"); +// +// coreService.applyEntity(entitySpec); +// +// Map args = new HashMap<>(); +// when(jobManager.startJob(any(), any())).thenAnswer((Answer) invocation -> { +// args.put(0, invocation.getArgument(0)); +// args.put(1, invocation.getArgument(1)); +// return "externalJobId1234"; +// }); +// +// coreService.applyFeature(featureSpec); +// } +// +// @TestConfiguration +// public static class MockProvider { +// +// @Bean +// public JobManager jobManager() { +// return Mockito.mock(JobManager.class); +// } +// +// @Bean +// public FeatureStream featureStream() { +// return Mockito.mock(FeatureStream.class); +// } +// } } \ No newline at end of file diff --git a/core/src/test/java/feast/core/CoreApplicationWithNoServingTest.java b/core/src/test/java/feast/core/CoreApplicationWithNoServingTest.java deleted file mode 100644 index 4b6cbb7838b..00000000000 --- a/core/src/test/java/feast/core/CoreApplicationWithNoServingTest.java +++ /dev/null @@ -1,108 +0,0 @@ -package feast.core; - -import static feast.core.config.StorageConfig.DEFAULT_WAREHOUSE_ID; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; - -import com.google.protobuf.Timestamp; -import feast.core.config.ImportJobDefaults; -import feast.core.job.JobManager; -import feast.core.model.StorageInfo; -import feast.core.service.SpecService; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.ImportSpecProto.Field; -import feast.specs.ImportSpecProto.ImportSpec; -import feast.specs.ImportSpecProto.Schema; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.types.ValueProto.ValueType; -import io.grpc.ManagedChannel; -import io.grpc.ManagedChannelBuilder; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.Collections; -import java.util.List; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mockito; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.boot.test.context.TestConfiguration; -import org.springframework.context.annotation.Bean; -import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.junit4.SpringRunner; - -/** - * Starts the application context with some properties - */ -@RunWith(SpringRunner.class) -@SpringBootTest(properties = { - "feast.jobs.workspace=${java.io.tmpdir}/${random.uuid}", - "spring.datasource.url=jdbc:h2:mem:testdb", - 
"feast.store.warehouse.type=file.json", - "feast.store.warehouse.options={\"path\":\"/tmp/foobar\"}", - "feast.store.errors.type=stderr" -}) -@DirtiesContext -public class CoreApplicationWithNoServingTest { - - @Autowired - SpecService specService; - @Autowired - ImportJobDefaults jobDefaults; - @Autowired - JobManager jobManager; - - @Test - public void test_withProperties_systemServingAndWarehouseStoresRegistered() throws IOException { - Files.createDirectory(Paths.get(jobDefaults.getWorkspace())); - - List warehouseStorageInfo = specService - .getStorage(Collections.singletonList(DEFAULT_WAREHOUSE_ID)); - assertEquals(warehouseStorageInfo.size(), 1); - assertEquals(warehouseStorageInfo.get(0).getStorageSpec(), StorageSpec.newBuilder() - .setId(DEFAULT_WAREHOUSE_ID).setType("file.json").putOptions("path", "/tmp/foobar") - .build()); - - ManagedChannelBuilder channelBuilder = ManagedChannelBuilder.forAddress("localhost", 6565); - ManagedChannel channel = channelBuilder.usePlaintext(true).build(); - CoreServiceGrpc.CoreServiceBlockingStub coreService = CoreServiceGrpc.newBlockingStub(channel); - JobServiceGrpc.JobServiceBlockingStub jobService = JobServiceGrpc.newBlockingStub(channel); - - EntitySpec entitySpec = EntitySpec.newBuilder().setName("test").build(); - FeatureSpec featureSpec = FeatureSpec.newBuilder() - .setId("test.int64") - .setName("int64") - .setEntity("test") - .setValueType(ValueType.Enum.INT64) - .setOwner("hermione@example.com") - .setDescription("Test is a test") - .setUri("http://example.com/test.int64").build(); - ImportSpec importSpec = ImportSpec.newBuilder() - .setSchema(Schema.newBuilder() - .setEntityIdColumn("id") - .setTimestampValue(Timestamp.getDefaultInstance()) - .addFields(Field.newBuilder().setName("id")) - .addFields(Field.newBuilder().setName("a").setFeatureId("test.int64"))) - .addEntities("test") - .setType("file.csv") - .putSourceOptions("path", "/tmp/foobar").build(); - - coreService.applyEntity(entitySpec); - try { - coreService.applyFeature(featureSpec); - fail("should fail validation as there is not serving store"); - } catch (Exception e) { - } - } - - @TestConfiguration - public static class MockProvider { - - @Bean - public JobManager jobManager() { - return Mockito.mock(JobManager.class); - } - } -} \ No newline at end of file diff --git a/core/src/test/java/feast/core/CoreApplicationWithNoWarehouseTest.java b/core/src/test/java/feast/core/CoreApplicationWithNoWarehouseTest.java deleted file mode 100644 index 1acd238b8ba..00000000000 --- a/core/src/test/java/feast/core/CoreApplicationWithNoWarehouseTest.java +++ /dev/null @@ -1,146 +0,0 @@ -package feast.core; - -import static feast.core.config.StorageConfig.DEFAULT_ERRORS_ID; -import static feast.core.config.StorageConfig.DEFAULT_SERVING_ID; -import static feast.core.config.StorageConfig.DEFAULT_WAREHOUSE_ID; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.any; - -import com.google.protobuf.Timestamp; -import feast.core.JobServiceProto.JobServiceTypes.SubmitImportJobRequest; -import feast.core.JobServiceProto.JobServiceTypes.SubmitImportJobResponse; -import feast.core.config.ImportJobDefaults; -import feast.core.job.JobManager; -import feast.core.model.StorageInfo; -import feast.core.service.SpecService; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.ImportJobSpecsProto.ImportJobSpecs; -import feast.specs.ImportSpecProto.Field; -import 
feast.specs.ImportSpecProto.ImportSpec; -import feast.specs.ImportSpecProto.Schema; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.types.ValueProto.ValueType; -import io.grpc.ManagedChannel; -import io.grpc.ManagedChannelBuilder; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mockito; -import org.mockito.stubbing.Answer; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.boot.test.context.TestConfiguration; -import org.springframework.context.annotation.Bean; -import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.junit4.SpringRunner; - -/** - * Starts the application context with some properties - */ -@RunWith(SpringRunner.class) -@SpringBootTest(properties = { - "feast.jobs.workspace=${java.io.tmpdir}/${random.uuid}", - "spring.datasource.url=jdbc:h2:mem:testdb", - "feast.store.serving.type=redis", - "feast.store.serving.options={\"host\":\"localhost\",\"port\":1234}", - "feast.store.errors.type=stderr" -}) -@DirtiesContext -public class CoreApplicationWithNoWarehouseTest { - - @Autowired - SpecService specService; - @Autowired - ImportJobDefaults jobDefaults; - @Autowired - JobManager jobManager; - - @Test - public void test_withProperties_systemServingAndWarehouseStoresRegistered() throws IOException { - Files.createDirectory(Paths.get(jobDefaults.getWorkspace())); - - try { - specService.getStorage(Collections.singletonList(DEFAULT_WAREHOUSE_ID)); - fail("should have thrown exception"); - } catch (feast.core.exception.RetrievalException e) { - } - - List servingStorageInfo = specService - .getStorage(Collections.singletonList(DEFAULT_SERVING_ID)); - assertEquals(servingStorageInfo.size(), 1); - assertEquals(servingStorageInfo.get(0).getStorageSpec(), StorageSpec.newBuilder() - .setId(DEFAULT_SERVING_ID).setType("redis") - .putOptions("host", "localhost") - .putOptions("port", "1234") - .build()); - - ManagedChannelBuilder channelBuilder = ManagedChannelBuilder.forAddress("localhost", 6565); - ManagedChannel channel = channelBuilder.usePlaintext(true).build(); - CoreServiceGrpc.CoreServiceBlockingStub coreService = CoreServiceGrpc.newBlockingStub(channel); - JobServiceGrpc.JobServiceBlockingStub jobService = JobServiceGrpc.newBlockingStub(channel); - - EntitySpec entitySpec = EntitySpec.newBuilder().setName("test").build(); - FeatureSpec featureSpec = FeatureSpec.newBuilder() - .setId("test.int64") - .setName("int64") - .setEntity("test") - .setValueType(ValueType.Enum.INT64) - .setOwner("hermione@example.com") - .setDescription("Test is a test") - .setUri("http://example.com/test.int64").build(); - ImportSpec importSpec = ImportSpec.newBuilder() - .setSchema(Schema.newBuilder() - .setEntityIdColumn("id") - .setTimestampValue(Timestamp.getDefaultInstance()) - .addFields(Field.newBuilder().setName("id")) - .addFields(Field.newBuilder().setName("a").setFeatureId("test.int64"))) - .addEntities("test") - .setType("file.csv") - .putSourceOptions("path", "/tmp/foobar").build(); - - coreService.applyEntity(entitySpec); - coreService.applyFeature(featureSpec); - SubmitImportJobRequest jobSubmitReq = SubmitImportJobRequest.newBuilder() - .setImportSpec(importSpec).build(); - - Map args = new HashMap<>(); - 
Mockito.when(jobManager.submitJob(any(), any())).thenAnswer((Answer) invocation -> { - args.put(0, invocation.getArgument(0)); - args.put(1, invocation.getArgument(1)); - return "externalJobId1234"; - }); - SubmitImportJobResponse jobSubmitRes = jobService.submitJob(jobSubmitReq); - String jobId = jobSubmitRes.getJobId(); - assertEquals(args.get(1), Paths.get(jobDefaults.getWorkspace()).resolve(jobId)); - assertEquals(ImportJobSpecs.newBuilder() - .setJobId(jobId) - .setImportSpec(importSpec) - .setErrorsStorageSpec(StorageSpec.newBuilder() - .setId(DEFAULT_ERRORS_ID) - .setType("stderr")) - .addEntitySpecs(entitySpec) - .addFeatureSpecs(featureSpec) - .setServingStorageSpec(StorageSpec.newBuilder() - .setId(DEFAULT_SERVING_ID) - .setType("redis") - .putOptions("host", "localhost").putOptions("port", "1234")) - .build(), args.get(0)); - } - - @TestConfiguration - public static class MockProvider { - - @Bean - public JobManager jobManager() { - return Mockito.mock(JobManager.class); - } - } -} \ No newline at end of file diff --git a/core/src/test/java/feast/core/config/StorageConfigTest.java b/core/src/test/java/feast/core/config/StorageConfigTest.java deleted file mode 100644 index a8be939117c..00000000000 --- a/core/src/test/java/feast/core/config/StorageConfigTest.java +++ /dev/null @@ -1,71 +0,0 @@ -package feast.core.config; - -import static feast.core.config.StorageConfig.DEFAULT_ERRORS_ID; -import static feast.core.config.StorageConfig.DEFAULT_SERVING_ID; -import static feast.core.config.StorageConfig.DEFAULT_WAREHOUSE_ID; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.mockito.Mockito.verify; - -import feast.core.config.StorageConfig.StorageSpecs; -import feast.core.validators.SpecValidator; -import feast.specs.StorageSpecProto.StorageSpec; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mockito; - -public class StorageConfigTest { - - public SpecValidator validator = Mockito.mock(SpecValidator.class); - - StorageConfig config; - - @Before - public void before() { - config = new StorageConfig(validator); - } - - @Test - public void testBuildErrorsStorageSpec() { - StorageSpecs storageSpecs = config.getStorageSpecs("", "", "", - "", "file.json", "{\"path\": \"/tmp/errors\"}"); - StorageSpec storageSpec = StorageSpec.newBuilder() - .setId(DEFAULT_ERRORS_ID).setType("file.json") - .putOptions("path", "/tmp/errors").build(); - assertEquals(storageSpec, storageSpecs.getErrorsStorageSpec()); - assertNull(storageSpecs.getServingStorageSpec()); - assertNull(storageSpecs.getWarehouseStorageSpec()); - verify(validator).validateErrorsStorageSpec(storageSpec); - } - - @Test - public void testBuildServingStorageSpec() { - StorageSpecs storageSpecs = config.getStorageSpecs( - "redis", "{\"host\": \"localhost\", \"port\": \"1234\"}", "", - "", "", ""); - StorageSpec storageSpec = StorageSpec.newBuilder() - .setId(DEFAULT_SERVING_ID).setType("redis") - .putOptions("host", "localhost") - .putOptions("port", "1234").build(); - assertEquals(storageSpec, storageSpecs.getServingStorageSpec()); - assertNull(storageSpecs.getErrorsStorageSpec()); - assertNull(storageSpecs.getWarehouseStorageSpec()); - verify(validator).validateServingStorageSpec(storageSpec); - } - - @Test - public void testBuildWarehouseStorageSpec() { - StorageSpecs storageSpecs = config.getStorageSpecs( - "", "", - "bigquery", "{\"project\": \"project1\", \"dataset\": \"feast\"}", - "", ""); - StorageSpec storageSpec = StorageSpec.newBuilder() - 
.setId(DEFAULT_WAREHOUSE_ID).setType("bigquery") - .putOptions("project", "project1") - .putOptions("dataset", "feast").build(); - assertEquals(storageSpec, storageSpecs.getWarehouseStorageSpec()); - assertNull(storageSpecs.getErrorsStorageSpec()); - assertNull(storageSpecs.getServingStorageSpec()); - verify(validator).validateWarehouseStorageSpec(storageSpec); - } -} diff --git a/core/src/test/java/feast/core/grpc/DatasetServiceImplTest.java b/core/src/test/java/feast/core/grpc/DatasetServiceImplTest.java deleted file mode 100644 index 88bea40d9e8..00000000000 --- a/core/src/test/java/feast/core/grpc/DatasetServiceImplTest.java +++ /dev/null @@ -1,249 +0,0 @@ -package feast.core.grpc; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.equalTo; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyLong; -import static org.mockito.ArgumentMatchers.anyMap; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.google.protobuf.Timestamp; -import com.google.protobuf.util.Timestamps; -import feast.core.DatasetServiceGrpc; -import feast.core.DatasetServiceProto.DatasetInfo; -import feast.core.DatasetServiceProto.FeatureSet; -import feast.core.DatasetServiceProto.DatasetServiceTypes.CreateDatasetRequest; -import feast.core.DatasetServiceProto.DatasetServiceTypes.CreateDatasetResponse; -import feast.core.training.BigQueryTraningDatasetCreator; -import io.grpc.StatusRuntimeException; -import io.grpc.inprocess.InProcessChannelBuilder; -import io.grpc.inprocess.InProcessServerBuilder; -import io.grpc.testing.GrpcCleanupRule; -import java.text.ParseException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; - -public class DatasetServiceImplTest { - - @Rule public final GrpcCleanupRule grpcCleanup = new GrpcCleanupRule(); - @Rule public final ExpectedException expectedException = ExpectedException.none(); - - @Mock private BigQueryTraningDatasetCreator trainingDatasetCreator; - private DatasetServiceGrpc.DatasetServiceBlockingStub client; - - private Timestamp validStartDate; - private Timestamp validEndDate; - private FeatureSet validFeatureSet; - - @Before - public void setUp() throws Exception { - MockitoAnnotations.initMocks(this); - - DatasetServiceImpl DatasetService = new DatasetServiceImpl(trainingDatasetCreator); - String serverName = InProcessServerBuilder.generateName(); - - grpcCleanup.register( - InProcessServerBuilder.forName(serverName) - .directExecutor() - .addService(DatasetService) - .build() - .start()); - - client = - DatasetServiceGrpc.newBlockingStub( - InProcessChannelBuilder.forName(serverName).directExecutor().build()); - - validStartDate = Timestamps.parse("2018-01-02T10:00:20.021-05:00"); - validEndDate = Timestamps.parse("2018-12-01T10:00:20.021-05:00"); - validFeatureSet = - FeatureSet.newBuilder() - .setEntityName("myentity") - .addFeatureIds("myentity.feature1") - .addFeatureIds("myentity.feature2") - .build(); - } - - @SuppressWarnings("ResultOfMethodCallIgnored") - @Test - public void shouldCallcreateDatasetWithCorrectRequest() { - DatasetInfo datasetInfo = - DatasetInfo.newBuilder().setName("mydataset").setTableUrl("project.dataset.table").build(); - 
when(trainingDatasetCreator.createDataset( - any(FeatureSet.class), - any(Timestamp.class), - any(Timestamp.class), - anyLong(), - anyString(), - anyMap())) - .thenReturn(datasetInfo); - - long limit = 9999; - String namePrefix = "mydataset"; - CreateDatasetRequest request = - CreateDatasetRequest.newBuilder() - .setFeatureSet(validFeatureSet) - .setStartDate(validStartDate) - .setEndDate(validEndDate) - .setLimit(limit) - .setNamePrefix(namePrefix) - .build(); - - client.createDataset(request); - - verify(trainingDatasetCreator) - .createDataset(validFeatureSet, validStartDate, validEndDate, limit, namePrefix, Collections - .emptyMap()); - } - - @SuppressWarnings("ResultOfMethodCallIgnored") - @Test - public void shouldCallcreateDatasetWithCorrectRequestWithFilters() { - DatasetInfo datasetInfo = - DatasetInfo.newBuilder().setName("mydataset").setTableUrl("project.dataset.table").build(); - when(trainingDatasetCreator.createDataset( - any(FeatureSet.class), - any(Timestamp.class), - any(Timestamp.class), - anyLong(), - anyString(), - anyMap())) - .thenReturn(datasetInfo); - - long limit = 9999; - String namePrefix = "mydataset"; - Map filters = new HashMap<>(); - filters.put("key1", "value1"); - filters.put("key2", "value2"); - CreateDatasetRequest request = - CreateDatasetRequest.newBuilder() - .setFeatureSet(validFeatureSet) - .setStartDate(validStartDate) - .setEndDate(validEndDate) - .setLimit(limit) - .setNamePrefix(namePrefix) - .putAllFilters(filters) - .build(); - - client.createDataset(request); - - - verify(trainingDatasetCreator) - .createDataset(validFeatureSet, validStartDate, validEndDate, limit, namePrefix, filters); - } - - @Test - public void shouldPropagateCreatedDatasetInfo() { - DatasetInfo datasetInfo = - DatasetInfo.newBuilder().setName("mydataset").setTableUrl("project.dataset.table").build(); - when(trainingDatasetCreator.createDataset( - any(FeatureSet.class), - any(Timestamp.class), - any(Timestamp.class), - anyLong(), - anyString(), - anyMap())) - .thenReturn(datasetInfo); - - long limit = 9999; - String namePrefix = "mydataset"; - CreateDatasetRequest request = - CreateDatasetRequest.newBuilder() - .setFeatureSet(validFeatureSet) - .setStartDate(validEndDate) - .setEndDate(validEndDate) - .setLimit(limit) - .setNamePrefix(namePrefix) - .build(); - - CreateDatasetResponse resp = client.createDataset(request); - DatasetInfo actual = resp.getDatasetInfo(); - - assertThat(actual, equalTo(datasetInfo)); - } - - @SuppressWarnings("ResultOfMethodCallIgnored") - @Test - public void shouldThrowExceptionIfFeatureSetEmpty() { - FeatureSet emptyFeatureSet = FeatureSet.newBuilder().setEntityName("myentity").build(); - - CreateDatasetRequest request = - CreateDatasetRequest.newBuilder() - .setFeatureSet(emptyFeatureSet) - .setStartDate(validStartDate) - .setEndDate(validEndDate) - .build(); - - expectedException.expect(StatusRuntimeException.class); - expectedException.expectMessage("feature set is empty"); - - client.createDataset(request); - } - - @SuppressWarnings("ResultOfMethodCallIgnored") - @Test - public void shouldThrowExceptionIfFeatureSetHasEmptyEntity() { - FeatureSet emptyFeatureSet = FeatureSet.newBuilder().setEntityName("").build(); - - CreateDatasetRequest request = - CreateDatasetRequest.newBuilder() - .setFeatureSet(emptyFeatureSet) - .setStartDate(validStartDate) - .setEndDate(validEndDate) - .build(); - - expectedException.expect(StatusRuntimeException.class); - expectedException.expectMessage("entity name in feature set is null or empty"); - - 
client.createDataset(request); - } - - @SuppressWarnings("ResultOfMethodCallIgnored") - @Test - public void shouldThrowExceptionIfFeatureSetHasDifferentEntity() { - FeatureSet emptyFeatureSet = - FeatureSet.newBuilder() - .setEntityName("myentity") - .addFeatureIds("myentity.feature1") - .addFeatureIds("driver.feature2") - .build(); - - CreateDatasetRequest request = - CreateDatasetRequest.newBuilder() - .setFeatureSet(emptyFeatureSet) - .setStartDate(validStartDate) - .setEndDate(validEndDate) - .build(); - - expectedException.expect(StatusRuntimeException.class); - expectedException.expectMessage("feature set contains different entity name: driver"); - - client.createDataset(request); - } - - @SuppressWarnings("ResultOfMethodCallIgnored") - @Test - public void shouldThrowExceptionWhenStartDateIsAfterEndDate() throws ParseException { - Timestamp laterStartDate = Timestamps.parse("2020-12-01T10:00:20.021-05:00"); - - CreateDatasetRequest request = - CreateDatasetRequest.newBuilder() - .setFeatureSet(validFeatureSet) - .setStartDate(laterStartDate) - .setEndDate(validEndDate) - .build(); - - expectedException.expect(StatusRuntimeException.class); - expectedException.expectMessage("startDate is after endDate"); - - client.createDataset(request); - } -} diff --git a/core/src/test/java/feast/core/grpc/UIServiceImplTest.java b/core/src/test/java/feast/core/grpc/UIServiceImplTest.java deleted file mode 100644 index 48f4623c269..00000000000 --- a/core/src/test/java/feast/core/grpc/UIServiceImplTest.java +++ /dev/null @@ -1,439 +0,0 @@ -package feast.core.grpc; - -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.google.protobuf.Empty; -import feast.core.UIServiceGrpc; -import feast.core.UIServiceProto.UIServiceTypes.EntityDetail; -import feast.core.UIServiceProto.UIServiceTypes.FeatureDetail; -import feast.core.UIServiceProto.UIServiceTypes.FeatureGroupDetail; -import feast.core.UIServiceProto.UIServiceTypes.GetEntityRequest; -import feast.core.UIServiceProto.UIServiceTypes.GetEntityResponse; -import feast.core.UIServiceProto.UIServiceTypes.GetFeatureGroupRequest; -import feast.core.UIServiceProto.UIServiceTypes.GetFeatureGroupResponse; -import feast.core.UIServiceProto.UIServiceTypes.GetFeatureRequest; -import feast.core.UIServiceProto.UIServiceTypes.GetFeatureResponse; -import feast.core.UIServiceProto.UIServiceTypes.GetStorageRequest; -import feast.core.UIServiceProto.UIServiceTypes.GetStorageResponse; -import feast.core.UIServiceProto.UIServiceTypes.ListEntitiesResponse; -import feast.core.UIServiceProto.UIServiceTypes.ListFeatureGroupsResponse; -import feast.core.UIServiceProto.UIServiceTypes.ListFeaturesResponse; -import feast.core.UIServiceProto.UIServiceTypes.ListStorageResponse; -import feast.core.UIServiceProto.UIServiceTypes.StorageDetail; -import feast.core.config.StorageConfig.StorageSpecs; -import feast.core.model.EntityInfo; -import feast.core.model.FeatureGroupInfo; -import feast.core.model.FeatureInfo; -import feast.core.model.StorageInfo; -import feast.core.service.SpecService; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureGroupSpecProto.FeatureGroupSpec; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.StorageSpecProto.StorageSpec; -import io.grpc.StatusRuntimeException; -import io.grpc.inprocess.InProcessChannelBuilder; -import 
io.grpc.inprocess.InProcessServerBuilder; -import io.grpc.testing.GrpcCleanupRule; -import java.util.Arrays; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.mockito.Mock; - -public class UIServiceImplTest { - - @Rule - public final GrpcCleanupRule grpcCleanup = new GrpcCleanupRule(); - @Rule - public final ExpectedException expectedException = ExpectedException.none(); - @Mock - public SpecService specService; - private UIServiceGrpc.UIServiceBlockingStub client; - - private StorageSpecs storageSpecs; - - @Before - public void setUp() throws Exception { - specService = mock(SpecService.class); - storageSpecs = StorageSpecs.builder().build(); - - UIServiceImpl service = new UIServiceImpl(specService); - - String serverName = InProcessServerBuilder.generateName(); - grpcCleanup.register( - InProcessServerBuilder.forName(serverName) - .directExecutor() - .addService(service) - .build() - .start()); - - when(specService.getStorageSpecs()).thenReturn(storageSpecs); - - client = - UIServiceGrpc.newBlockingStub( - grpcCleanup.register( - InProcessChannelBuilder.forName(serverName).directExecutor().build())); - } - - @Test - public void getEntity_shouldReturnCorrectEntityDetail() { - String entityName = "entity"; - GetEntityRequest req = GetEntityRequest.newBuilder().setId(entityName).build(); - - EntitySpec entitySpec = - EntitySpec.newBuilder().setName(entityName).setDescription("test entity").build(); - - EntityInfo entityInfo = new EntityInfo(entitySpec); - entityInfo.setLastUpdated(new Date()); - - when(specService.getEntities(Collections.singletonList(entityName))) - .thenReturn(Collections.singletonList(entityInfo)); - - GetEntityResponse resp = client.getEntity(req); - EntityDetail actual = resp.getEntity(); - - assertThat(actual, equalTo(entityInfo.getEntityDetail())); - } - - @Test - @SuppressWarnings("ResultOfMethodCallIgnored") - public void getEntity_shouldReturnClearErrorMessageForInvalidEntityName() { - String entityName = ""; - GetEntityRequest req = GetEntityRequest.newBuilder().setId(entityName).build(); - - EntitySpec entitySpec = - EntitySpec.newBuilder().setName(entityName).setDescription("test entity").build(); - - EntityInfo entityInfo = new EntityInfo(entitySpec); - entityInfo.setLastUpdated(new Date()); - - when(specService.getEntities(Collections.singletonList(entityName))) - .thenThrow(new IllegalArgumentException("invalid entity name")); - - expectedException.expect(StatusRuntimeException.class); - expectedException.expectMessage("Invalid entity name: " + entityName); - client.getEntity(req); - } - - @Test - @SuppressWarnings("ResultOfMethodCallIgnored") - public void getEntity_shouldReturnClearErrorMessageForAnyFailure() { - String entityName = ""; - GetEntityRequest req = GetEntityRequest.newBuilder().setId(entityName).build(); - - EntitySpec entitySpec = - EntitySpec.newBuilder().setName(entityName).setDescription("test entity").build(); - - EntityInfo entityInfo = new EntityInfo(entitySpec); - entityInfo.setLastUpdated(new Date()); - - when(specService.getEntities(Collections.singletonList(entityName))) - .thenThrow(new RuntimeException()); - - expectedException.expect(StatusRuntimeException.class); - expectedException.expectMessage("Error while retrieving entity with name: " + entityName); - client.getEntity(req); - } - - @Test - public void listEntities_shouldReturnAllEntities() { - EntitySpec entitySpec1 = 
- EntitySpec.newBuilder().setName("entity1").setDescription("test entity").build(); - EntityInfo entityInfo1 = new EntityInfo(entitySpec1); - entityInfo1.setLastUpdated(new Date()); - - EntitySpec entitySpec2 = - EntitySpec.newBuilder().setName("entity2").setDescription("test entity").build(); - EntityInfo entityInfo2 = new EntityInfo(entitySpec2); - entityInfo2.setLastUpdated(new Date()); - - List entityInfos = Arrays.asList(entityInfo1, entityInfo2); - - when(specService.listEntities()).thenReturn(entityInfos); - - ListEntitiesResponse resp = client.listEntities(Empty.getDefaultInstance()); - List actual = resp.getEntitiesList(); - - assertThat( - actual, - containsInAnyOrder(entityInfos.stream().map(EntityInfo::getEntityDetail).toArray())); - } - - @Test - @SuppressWarnings("ResultOfMethodCallIgnored") - public void listEntities_shouldReturnClearErrorMessageForAnyFailure() { - when(specService.listEntities()).thenThrow(new RuntimeException()); - - expectedException.expect(StatusRuntimeException.class); - expectedException.expectMessage("Error while getting all entities"); - - client.listEntities(Empty.getDefaultInstance()); - } - - @Test - public void getFeature_shouldReturnCorrectFeatureDetail() { - String featureId = "entity.feature"; - FeatureInfo featureInfo = createFeatureInfo(featureId); - - when(specService.getFeatures(Collections.singletonList(featureId))) - .thenReturn(Collections.singletonList(featureInfo)); - - GetFeatureRequest req = GetFeatureRequest.newBuilder().setId(featureId).build(); - GetFeatureResponse resp = client.getFeature(req); - - FeatureDetail expected = featureInfo.getFeatureDetail(storageSpecs); - FeatureDetail actual = resp.getFeature(); - - assertThat(actual, equalTo(expected)); - } - - @Test - @SuppressWarnings("ResultOfMethodCallIgnored") - public void getFeature_shouldReturnInvalidArgumentForInvalidFeatureId() { - String featureId = "invalid.feature.id"; - - when(specService.getFeatures(Collections.singletonList(featureId))) - .thenThrow(new IllegalArgumentException()); - - GetFeatureRequest req = GetFeatureRequest.newBuilder().setId(featureId).build(); - expectedException.expect(StatusRuntimeException.class); - expectedException.expectMessage("Invalid feature ID: " + featureId); - - client.getFeature(req); - } - - @Test - @SuppressWarnings("ResultOfMethodCallIgnored") - public void getFeature_shouldReturnErrorForAnyFailure() { - String featureId = "invalid.feature.id"; - - when(specService.getFeatures(Collections.singletonList(featureId))) - .thenThrow(new RuntimeException()); - - GetFeatureRequest req = GetFeatureRequest.newBuilder().setId(featureId).build(); - expectedException.expect(StatusRuntimeException.class); - expectedException.expectMessage("Error while retrieving feature with ID: " + featureId); - - client.getFeature(req); - } - - @Test - public void listFeature_shouldReturnAllFeatures() { - String featureId1 = "entity.feature1"; - String featureId2 = "entity.feature2"; - - FeatureInfo featureInfo1 = createFeatureInfo(featureId1); - FeatureInfo featureInfo2 = createFeatureInfo(featureId2); - - List featureInfos = Arrays.asList(featureInfo1, featureInfo2); - - when(specService.listFeatures()).thenReturn(featureInfos); - - ListFeaturesResponse resp = client.listFeatures(Empty.getDefaultInstance()); - List actual = resp.getFeaturesList(); - - assertThat( - actual, - containsInAnyOrder( - featureInfos.stream().map((fi) -> fi.getFeatureDetail(storageSpecs)).toArray())); - } - - @Test - @SuppressWarnings("ResultOfMethodCallIgnored") - public void 
listFeature_shouldReturnClearErrorMessageForAnyFailure() { - when(specService.listFeatures()).thenThrow(new RuntimeException()); - - expectedException.expect(StatusRuntimeException.class); - expectedException.expectMessage("Error while getting all features"); - - client.listFeatures(Empty.getDefaultInstance()); - } - - @Test - public void getFeatureGroup_shouldReturnCorrectFeatureGroup() { - String featureGroupId = "featureGroup"; - - FeatureGroupInfo featureGroupInfo = createFeatureGroupInfo(featureGroupId); - - when(specService.getFeatureGroups(Collections.singletonList(featureGroupId))) - .thenReturn(Collections.singletonList(featureGroupInfo)); - - GetFeatureGroupRequest req = GetFeatureGroupRequest.newBuilder().setId(featureGroupId).build(); - - GetFeatureGroupResponse resp = client.getFeatureGroup(req); - FeatureGroupDetail actual = resp.getFeatureGroup(); - - assertThat(actual, equalTo(featureGroupInfo.getFeatureGroupDetail())); - } - - @Test - @SuppressWarnings("ResultOfMethodCallIgnored") - public void getFeatureGroup_shouldReturnClearErrorMessageForInvalidId() { - String invalidFeatureGroupId = "invalidId"; - - when(specService.getFeatureGroups(Collections.singletonList(invalidFeatureGroupId))) - .thenThrow(new IllegalArgumentException()); - - expectedException.expect(StatusRuntimeException.class); - expectedException.expectMessage("Invalid feature group ID: " + invalidFeatureGroupId); - - GetFeatureGroupRequest req = - GetFeatureGroupRequest.newBuilder().setId(invalidFeatureGroupId).build(); - - client.getFeatureGroup(req); - } - - @Test - @SuppressWarnings("ResultOfMethodCallIgnored") - public void getFeatureGroup_shouldReturnClearErrorMessageForAnyFailure() { - String featureGroupId = "invalidId"; - - when(specService.getFeatureGroups(Collections.singletonList(featureGroupId))) - .thenThrow(new RuntimeException()); - - expectedException.expect(StatusRuntimeException.class); - expectedException.expectMessage("Error while getting feature group with ID: " + featureGroupId); - - GetFeatureGroupRequest req = GetFeatureGroupRequest.newBuilder().setId(featureGroupId).build(); - - client.getFeatureGroup(req); - } - - @Test - public void listFeatureGroup_shouldReturnAllFeatureGroups() { - FeatureGroupInfo featureGroupInfo1 = createFeatureGroupInfo("featureGroup1"); - FeatureGroupInfo featureGroupInfo2 = createFeatureGroupInfo("featureGroup2"); - - List featureGroupInfos = Arrays.asList(featureGroupInfo1, featureGroupInfo2); - when(specService.listFeatureGroups()).thenReturn(featureGroupInfos); - - ListFeatureGroupsResponse resp = client.listFeatureGroups(Empty.getDefaultInstance()); - List actual = resp.getFeatureGroupsList(); - - assertThat( - actual, - containsInAnyOrder( - featureGroupInfos.stream().map(FeatureGroupInfo::getFeatureGroupDetail).toArray())); - } - - @Test - @SuppressWarnings("ResultOfMethodCallIgnored") - public void listFeatureGroup_shouldReturnClearErrorMessageForAnyFailure() { - when(specService.listFeatureGroups()).thenThrow(new RuntimeException()); - - expectedException.expect(StatusRuntimeException.class); - expectedException.expectMessage("Error while getting all feature groups"); - client.listFeatureGroups(Empty.getDefaultInstance()); - } - - @Test - public void getStorage_shouldReturnCorrectStorageDetail() { - String storageId = "mystorage"; - StorageSpec storageSpec = StorageSpec.newBuilder().setId(storageId).build(); - StorageInfo storageInfo = new StorageInfo(storageSpec); - storageInfo.setLastUpdated(new Date()); - - 
when(specService.getStorage(Collections.singletonList(storageId))) - .thenReturn(Collections.singletonList(storageInfo)); - - GetStorageRequest req = GetStorageRequest.newBuilder().setId(storageId).build(); - GetStorageResponse resp = client.getStorage(req); - StorageDetail actual = resp.getStorage(); - - assertThat(actual, equalTo(storageInfo.getStorageDetail())); - } - - @Test - @SuppressWarnings("ResultOfMethodCallIgnored") - public void getStorage_shouldReturnErrorForInvalidStorageId() { - String storageId = "invalid"; - - when(specService.getStorage(Collections.singletonList(storageId))) - .thenThrow(new IllegalArgumentException()); - - expectedException.expect(StatusRuntimeException.class); - expectedException.expectMessage("Invalid storage ID: " + storageId); - - GetStorageRequest req = GetStorageRequest.newBuilder().setId(storageId).build(); - client.getStorage(req); - } - - @Test - @SuppressWarnings("ResultOfMethodCallIgnored") - public void getStorage_shouldReturnErrorForAnyFailure() { - String storageId = "myStorage"; - - when(specService.getStorage(Collections.singletonList(storageId))) - .thenThrow(new RuntimeException()); - - expectedException.expect(StatusRuntimeException.class); - expectedException.expectMessage("Error while retrieving storage detail with ID: " + storageId); - - GetStorageRequest req = GetStorageRequest.newBuilder().setId(storageId).build(); - client.getStorage(req); - } - - @Test - public void listStorage_shouldReturnAllStorageDetail() { - String storageId1 = "storage1"; - StorageSpec storageSpec1 = StorageSpec.newBuilder().setId(storageId1).build(); - StorageInfo storageInfo1 = new StorageInfo(storageSpec1); - storageInfo1.setLastUpdated(new Date()); - - String storageId2 = "storage2"; - StorageSpec storageSpec2 = StorageSpec.newBuilder().setId(storageId2).build(); - StorageInfo storageInfo2 = new StorageInfo(storageSpec2); - storageInfo2.setLastUpdated(new Date()); - - List storageInfos = Arrays.asList(storageInfo1, storageInfo2); - - when(specService.listStorage()).thenReturn(storageInfos); - - ListStorageResponse resp = client.listStorage(Empty.getDefaultInstance()); - List actual = resp.getStorageList(); - - assertThat( - actual, - containsInAnyOrder(storageInfos.stream().map(StorageInfo::getStorageDetail).toArray())); - } - - @Test - @SuppressWarnings("ResultOfMethodCallIgnored") - public void listStorage_shouldReturnErrorForAnyFailure() { - when(specService.listStorage()).thenThrow(new RuntimeException()); - - expectedException.expect(StatusRuntimeException.class); - expectedException.expectMessage("Error while getting all storage details"); - - client.listStorage(Empty.getDefaultInstance()); - } - - private FeatureInfo createFeatureInfo(String featureId) { - EntitySpec entitySpec = EntitySpec.newBuilder().setName("entity").build(); - - EntityInfo entityInfo = new EntityInfo(entitySpec); - FeatureSpec featureSpec = FeatureSpec.newBuilder().setId(featureId).build(); - FeatureInfo featureInfo = - new FeatureInfo(featureSpec, entityInfo, null); - featureInfo.setCreated(new Date()); - featureInfo.setLastUpdated(new Date()); - return featureInfo; - } - - private FeatureGroupInfo createFeatureGroupInfo(String featureGroupId) { - FeatureGroupSpec featureGroupSpec = FeatureGroupSpec.newBuilder().setId(featureGroupId).build(); - FeatureGroupInfo featureGroupInfo = - new FeatureGroupInfo(featureGroupSpec); - featureGroupInfo.setCreated(new Date()); - featureGroupInfo.setLastUpdated(new Date()); - return featureGroupInfo; - } -} diff --git 
a/core/src/test/java/feast/core/http/UiServiceControllerTest.java b/core/src/test/java/feast/core/http/UiServiceControllerTest.java deleted file mode 100644 index 8e312ae264e..00000000000 --- a/core/src/test/java/feast/core/http/UiServiceControllerTest.java +++ /dev/null @@ -1,180 +0,0 @@ -package feast.core.http; - -import static feast.specs.FeatureSpecProto.FeatureSpec; -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import feast.core.UIServiceProto.UIServiceTypes.EntityDetail; -import feast.core.UIServiceProto.UIServiceTypes.FeatureDetail; -import feast.core.UIServiceProto.UIServiceTypes.FeatureGroupDetail; -import feast.core.UIServiceProto.UIServiceTypes.StorageDetail; -import feast.core.config.StorageConfig.StorageSpecs; -import feast.core.model.EntityInfo; -import feast.core.model.FeatureGroupInfo; -import feast.core.model.FeatureInfo; -import feast.core.model.StorageInfo; -import feast.core.service.JobManagementService; -import feast.core.service.SpecService; -import java.util.Collections; -import org.junit.Before; -import org.junit.Test; - -public class UiServiceControllerTest { - - private UiServiceController goodUiServiceController; - private UiServiceController badUiServiceController; - - @Before - public void setUp() throws Exception { - StorageSpecs storageSpecs = StorageSpecs.builder().build(); - - FeatureInfo mockFeatureInfo = mock(FeatureInfo.class); - when(mockFeatureInfo.getFeatureDetail(storageSpecs)) - .thenReturn(FeatureDetail.getDefaultInstance()); - when(mockFeatureInfo.getFeatureSpec()).thenReturn(FeatureSpec.getDefaultInstance()); - when(mockFeatureInfo.resolve()).thenReturn(mockFeatureInfo); - - EntityInfo mockEntityInfo = mock(EntityInfo.class); - when(mockEntityInfo.getEntityDetail()).thenReturn(EntityDetail.getDefaultInstance()); - - FeatureGroupInfo mockFeatureGroupInfo = mock(FeatureGroupInfo.class); - when(mockFeatureGroupInfo.getFeatureGroupDetail()) - .thenReturn(FeatureGroupDetail.getDefaultInstance()); - - StorageInfo mockStorageInfo = mock(StorageInfo.class); - when(mockStorageInfo.getStorageDetail()).thenReturn(StorageDetail.getDefaultInstance()); - - SpecService goodMockSpecService = mock(SpecService.class); - when(goodMockSpecService.getStorageSpecs()).thenReturn(storageSpecs); - when(goodMockSpecService.listFeatures()).thenReturn(Collections.singletonList(mockFeatureInfo)); - when(goodMockSpecService.getFeatures(Collections.singletonList("1"))) - .thenReturn(Collections.singletonList(mockFeatureInfo)); - when(goodMockSpecService.listFeatureGroups()) - .thenReturn(Collections.singletonList(mockFeatureGroupInfo)); - when(goodMockSpecService.getFeatureGroups(Collections.singletonList("1"))) - .thenReturn(Collections.singletonList(mockFeatureGroupInfo)); - when(goodMockSpecService.listEntities()).thenReturn(Collections.singletonList(mockEntityInfo)); - when(goodMockSpecService.getEntities(Collections.singletonList("1"))) - .thenReturn(Collections.singletonList(mockEntityInfo)); - when(goodMockSpecService.listStorage()).thenReturn(Collections.singletonList(mockStorageInfo)); - when(goodMockSpecService.getStorage(Collections.singletonList("1"))) - .thenReturn(Collections.singletonList(mockStorageInfo)); - - JobManagementService goodMockJobMangementService = mock(JobManagementService.class); - - goodUiServiceController = - new UiServiceController(goodMockSpecService, goodMockJobMangementService); - - SpecService badMockSpecService = mock(SpecService.class); - 
when(badMockSpecService.listFeatures()).thenReturn(null); - when(badMockSpecService.getFeatures(Collections.singletonList("1"))).thenReturn(null); - when(badMockSpecService.listFeatureGroups()).thenReturn(null); - when(badMockSpecService.getFeatureGroups(Collections.singletonList("1"))).thenReturn(null); - when(badMockSpecService.listEntities()).thenReturn(null); - when(badMockSpecService.getEntities(Collections.singletonList("1"))).thenReturn(null); - when(badMockSpecService.listStorage()).thenReturn(null); - when(badMockSpecService.getStorage(Collections.singletonList("1"))).thenReturn(null); - - JobManagementService badMockJobMangementService = mock(JobManagementService.class); - - badUiServiceController = - new UiServiceController(badMockSpecService, badMockJobMangementService); - } - - @Test - public void listFeatures() { - FeatureDetail expected = FeatureDetail.getDefaultInstance(); - FeatureDetail actual = goodUiServiceController.listFeatures().getFeaturesList().get(0); - assertEquals(expected, actual); - } - - @Test(expected = Exception.class) - public void listFeaturesWithException() { - badUiServiceController.listFeatures(); - } - - @Test - public void getFeature() { - FeatureDetail expected = FeatureDetail.getDefaultInstance(); - FeatureDetail actual = goodUiServiceController.getFeature("1").getFeature(); - assertEquals(expected, actual); - } - - @Test(expected = Exception.class) - public void getFeatureWithException() { - badUiServiceController.getFeature("1"); - } - - @Test - public void listFeatureGroups() { - FeatureGroupDetail expected = FeatureGroupDetail.getDefaultInstance(); - FeatureGroupDetail actual = goodUiServiceController.listFeatureGroups().getFeatureGroups(0); - assertEquals(expected, actual); - } - - @Test(expected = Exception.class) - public void listFeatureGroupsWithException() { - badUiServiceController.listFeatureGroups(); - } - - @Test - public void getFeatureGroup() { - FeatureGroupDetail expected = FeatureGroupDetail.getDefaultInstance(); - FeatureGroupDetail actual = goodUiServiceController.getFeatureGroup("1").getFeatureGroup(); - assertEquals(expected, actual); - } - - @Test(expected = Exception.class) - public void getFeatureGroupWithException() { - badUiServiceController.getFeatureGroup("1"); - } - - @Test - public void listEntities() { - EntityDetail expected = EntityDetail.getDefaultInstance(); - EntityDetail actual = goodUiServiceController.listEntities().getEntitiesList().get(0); - assertEquals(expected, actual); - } - - @Test(expected = Exception.class) - public void listEntitiesWithException() { - badUiServiceController.listEntities(); - } - - @Test - public void getEntity() { - EntityDetail expected = EntityDetail.getDefaultInstance(); - EntityDetail actual = goodUiServiceController.getEntity("1").getEntity(); - assertEquals(expected, actual); - } - - @Test(expected = Exception.class) - public void getEntityWithException() { - badUiServiceController.getEntity("1"); - } - - @Test - public void listStorage() { - StorageDetail expected = StorageDetail.getDefaultInstance(); - StorageDetail actual = goodUiServiceController.listStorage().getStorage(0); - assertEquals(expected, actual); - } - - @Test(expected = Exception.class) - public void listStorageWithException() { - badUiServiceController.listStorage(); - } - - @Test - public void getStorage() { - StorageDetail expected = StorageDetail.getDefaultInstance(); - StorageDetail actual = goodUiServiceController.getStorage("1").getStorage(); - assertEquals(expected, actual); - } - - @Test(expected = 
Exception.class) - public void getStorageWithException() { - badUiServiceController.getStorage("1"); - } -} diff --git a/core/src/test/java/feast/core/job/ScheduledJobMonitorTest.java b/core/src/test/java/feast/core/job/ScheduledJobMonitorTest.java index 1a05869657f..631e5a257a9 100644 --- a/core/src/test/java/feast/core/job/ScheduledJobMonitorTest.java +++ b/core/src/test/java/feast/core/job/ScheduledJobMonitorTest.java @@ -24,11 +24,18 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import com.google.common.collect.Lists; +import feast.core.SourceProto; +import feast.core.SourceProto.KafkaSourceConfig; +import feast.core.SourceProto.SourceType; import feast.core.dao.JobInfoRepository; import feast.core.dao.MetricsRepository; import feast.core.model.JobInfo; import feast.core.model.JobStatus; import feast.core.model.Metrics; +import feast.core.model.Source; +import feast.core.model.Store; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -43,41 +50,39 @@ public class ScheduledJobMonitorTest { ScheduledJobMonitor scheduledJobMonitor; - @Mock JobMonitor jobMonitor; + @Mock + JobMonitor jobMonitor; - @Mock StatsdMetricPusher stasdMetricPusher; - - @Mock JobInfoRepository jobInfoRepository; - - @Mock MetricsRepository metricsRepository; + @Mock + JobInfoRepository jobInfoRepository; @Before - public void setUp() throws Exception { + public void setUp() { MockitoAnnotations.initMocks(this); - scheduledJobMonitor = new ScheduledJobMonitor(jobMonitor, jobInfoRepository, stasdMetricPusher); + scheduledJobMonitor = new ScheduledJobMonitor(jobMonitor, jobInfoRepository); } @Test public void getJobStatus_shouldUpdateJobInfoForRunningJob() { + Source source = new Source(SourceType.KAFKA, + KafkaSourceConfig.newBuilder().setBootstrapServers("kafka:9092") + .setTopic("feast-topic").build(), true); JobInfo job = new JobInfo( "jobId", "extId1", - "Streaming", "DataflowRunner", - "", - "", - Collections.emptyList(), + source, + new Store(), Collections.emptyList(), Collections.emptyList(), - JobStatus.RUNNING, - ""); + JobStatus.RUNNING); when(jobInfoRepository.findByStatusNotIn((Collection) any(Collection.class))) .thenReturn(Collections.singletonList(job)); when(jobMonitor.getJobStatus(job)).thenReturn(JobStatus.COMPLETED); - scheduledJobMonitor.getJobStatus(); + scheduledJobMonitor.updateJobStatus(); ArgumentCaptor argCaptor = ArgumentCaptor.forClass(JobInfo.class); verify(jobInfoRepository).save(argCaptor.capture()); @@ -91,41 +96,9 @@ public void getJobStatus_shouldNotUpdateJobInfoForTerminalJob() { when(jobInfoRepository.findByStatusNotIn((Collection) any(Collection.class))) .thenReturn(Collections.emptyList()); - scheduledJobMonitor.getJobStatus(); + scheduledJobMonitor.updateJobStatus(); verify(jobInfoRepository, never()).save(any(JobInfo.class)); } - @Test - public void getJobMetrics_shouldPushToStatsDMetricPusherAndSaveNewMetricToDb() { - JobInfo job = - new JobInfo( - "jobId", - "extId1", - "Streaming", - "DataflowRunner", - "", - "", - Collections.emptyList(), - Collections.emptyList(), - Collections.emptyList(), - JobStatus.RUNNING, - ""); - - Metrics metric1 = new Metrics(job, "metric1", 1); - Metrics metric2 = new Metrics(job, "metric2", 2); - List metrics = Arrays.asList(metric1, metric2); - - when(jobInfoRepository.findByStatusNotIn((Collection) any(Collection.class))) - .thenReturn(Arrays.asList(job)); - when(jobMonitor.getJobMetrics(job)).thenReturn(metrics); - - 
scheduledJobMonitor.getJobMetrics(); - - verify(stasdMetricPusher).pushMetrics(metrics); - ArgumentCaptor argCaptor = ArgumentCaptor.forClass(JobInfo.class); - verify(jobInfoRepository).save(argCaptor.capture()); - - assertThat(job.getMetrics(), equalTo(metrics)); - } } diff --git a/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java b/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java index 508492ef58a..a2200f805f1 100644 --- a/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java +++ b/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java @@ -19,27 +19,37 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; import com.google.api.services.dataflow.Dataflow; import com.google.common.collect.Lists; -import feast.core.config.ImportJobDefaults; -import feast.core.util.PathUtil; -import feast.specs.ImportJobSpecsProto.ImportJobSpecs; -import feast.specs.ImportSpecProto.ImportSpec; -import java.io.ByteArrayInputStream; +import com.google.protobuf.util.JsonFormat; +import com.google.protobuf.util.JsonFormat.Printer; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.StoreProto; +import feast.core.StoreProto.Store.RedisConfig; +import feast.core.StoreProto.Store.StoreType; +import feast.core.config.FeastProperties.MetricsProperties; +import feast.core.exception.JobExecutionException; +import feast.ingestion.options.ImportOptions; import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.List; +import java.util.HashMap; +import java.util.Map; +import org.apache.beam.runners.dataflow.DataflowPipelineJob; +import org.apache.beam.runners.dataflow.DataflowRunner; +import org.apache.beam.sdk.PipelineResult.State; +import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; -import org.junit.rules.TemporaryFolder; +import org.mockito.ArgumentCaptor; import org.mockito.Mock; import org.mockito.Mockito; @@ -49,99 +59,101 @@ public class DataflowJobManagerTest { public final ExpectedException expectedException = ExpectedException.none(); @Mock - Dataflow dataflow; + private Dataflow dataflow; - @Rule - public TemporaryFolder tempFolder = new TemporaryFolder(); - - private ImportJobDefaults defaults; + private Map defaults; private DataflowJobManager dfJobManager; - private Path workspace; @Before - public void setUp() throws IOException { + public void setUp() { initMocks(this); - workspace = Paths.get(tempFolder.newFolder().toString()); - defaults = - ImportJobDefaults.builder() - .runner("DataflowRunner") - .importJobOptions("{\"key\":\"value\"}") - .executable("ingestion.jar") - .workspace(workspace.toString()).build(); - dfJobManager = new DataflowJobManager(dataflow, "project", "location", defaults); + defaults = new HashMap<>(); + defaults.put("project", "project"); + defaults.put("region", "region"); + MetricsProperties metricsProperties = new MetricsProperties(); + metricsProperties.setEnabled(false); + dfJobManager = new 
DataflowJobManager(dataflow, defaults, metricsProperties); + dfJobManager = spy(dfJobManager); } @Test - public void shouldBuildProcessBuilderWithCorrectOptions() { - ImportSpec importSpec = ImportSpec.newBuilder().setType("file").build(); - String jobName = "test"; - ImportJobSpecs importJobSpecs = ImportJobSpecs.newBuilder().setJobId(jobName) - .setImportSpec(importSpec).build(); - - ProcessBuilder pb = dfJobManager.getProcessBuilder(importJobSpecs, Paths.get("/tmp/foobar")); - List expected = - Lists.newArrayList( - "java", - "-jar", - "ingestion.jar", - "--jobName=test", - "--workspace=file:///tmp/foobar", - "--runner=DataflowRunner", - "--key=value"); - assertThat(pb.command(), equalTo(expected)); - } + public void shouldStartJobWithCorrectPipelineOptions() throws IOException { + StoreProto.Store store = StoreProto.Store.newBuilder() + .setName("SERVING") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379).build()) + .build(); - @Test - public void shouldBuildProcessBuilderWithGCSWorkspace() { - ImportSpec importSpec = ImportSpec.newBuilder().setType("file").build(); - String jobName = "test"; - ImportJobSpecs importJobSpecs = ImportJobSpecs.newBuilder().setJobId(jobName) - .setImportSpec(importSpec).build(); - - ProcessBuilder pb = dfJobManager.getProcessBuilder(importJobSpecs, PathUtil.getPath("gs://bucket/tmp/foobar")); - List expected = - Lists.newArrayList( - "java", - "-jar", - "ingestion.jar", - "--jobName=test", - "--workspace=gs://bucket/tmp/foobar", - "--runner=DataflowRunner", - "--key=value"); - assertThat(pb.command(), equalTo(expected)); - } + FeatureSetSpec featureSetSpec = FeatureSetSpec.newBuilder() + .setName("featureSet") + .setVersion(1) + .build(); - @Test - public void shouldRunProcessAndGetJobIdIfNoError() throws IOException { - Process process = Mockito.mock(Process.class); - String processOutput = "log1: asdds\nlog2: dasdasd\nlog3: FeastImportJobId:1231231231\n"; - String errorOutput = ""; - InputStream outputStream = - new ByteArrayInputStream(processOutput.getBytes(StandardCharsets.UTF_8)); - InputStream errorStream = - new ByteArrayInputStream(errorOutput.getBytes(StandardCharsets.UTF_8)); - when(process.getInputStream()).thenReturn(outputStream); - when(process.getErrorStream()).thenReturn(errorStream); - when(process.exitValue()).thenReturn(0); - when(process.isAlive()).thenReturn(true).thenReturn(false); - String jobId = dfJobManager.runProcess(process); - assertThat(jobId, equalTo("1231231231")); + Printer printer = JsonFormat.printer(); + String expectedExtJobId = "feast-job-0"; + String jobName = "job"; + + ImportOptions expectedPipelineOptions = PipelineOptionsFactory.fromArgs("") + .as(ImportOptions.class); + expectedPipelineOptions.setRunner(DataflowRunner.class); + expectedPipelineOptions.setProject("project"); + expectedPipelineOptions.setRegion("region"); + expectedPipelineOptions.setUpdate(false); + expectedPipelineOptions.setAppName("DataflowJobManager"); + expectedPipelineOptions.setJobName(jobName); + expectedPipelineOptions.setStoreJson(Lists.newArrayList(printer.print(store))); + expectedPipelineOptions + .setFeatureSetSpecJson(Lists.newArrayList(printer.print(featureSetSpec))); + + ArgumentCaptor captor = ArgumentCaptor + .forClass(ImportOptions.class); + + DataflowPipelineJob mockPipelineResult = Mockito.mock(DataflowPipelineJob.class); + when(mockPipelineResult.getState()).thenReturn(State.RUNNING); + when(mockPipelineResult.getJobId()).thenReturn(expectedExtJobId); + + 
doReturn(mockPipelineResult).when(dfJobManager).runPipeline(any()); + String jobId = dfJobManager.startJob(jobName, Lists.newArrayList(featureSetSpec), store); + + verify(dfJobManager, times(1)).runPipeline(captor.capture()); + ImportOptions actualPipelineOptions = captor.getValue(); + + expectedPipelineOptions.setOptionsId(actualPipelineOptions.getOptionsId()); // avoid comparing this value + + // We only check that we are calling getFilesToStage() manually, because the automatic approach + // throws an error: https://github.com/gojek/feast/pull/291 i.e. do not check for the actual files that are staged + assertThat("filesToStage in pipelineOptions should not be null, job manager should set it.", actualPipelineOptions.getFilesToStage() != null); + assertThat("filesToStage in pipelineOptions should contain at least 1 item", actualPipelineOptions.getFilesToStage().size() > 0); + // Assume the files that are staged are correct + expectedPipelineOptions.setFilesToStage(actualPipelineOptions.getFilesToStage()); + + assertThat(actualPipelineOptions.toString(), + equalTo(expectedPipelineOptions.toString())); + assertThat(jobId, equalTo(expectedExtJobId)); } + @Test - public void shouldThrowRuntimeExceptionIfErrorOccursInProcess() { - Process process = Mockito.mock(Process.class); - String processOutput = "log1: asdds\nlog2: dasdasd\n"; - String errorOutput = "error: stacktrace"; - InputStream outputStream = - new ByteArrayInputStream(processOutput.getBytes(StandardCharsets.UTF_8)); - InputStream errorStream = - new ByteArrayInputStream(errorOutput.getBytes(StandardCharsets.UTF_8)); - when(process.getInputStream()).thenReturn(outputStream); - when(process.getErrorStream()).thenReturn(errorStream); - when(process.exitValue()).thenReturn(1); - when(process.isAlive()).thenReturn(true).thenReturn(false); - expectedException.expect(RuntimeException.class); - dfJobManager.runProcess(process); + public void shouldThrowExceptionWhenJobStateTerminal() throws IOException { + StoreProto.Store store = StoreProto.Store.newBuilder() + .setName("SERVING") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379).build()) + .build(); + + FeatureSetSpec featureSetSpec = FeatureSetSpec.newBuilder() + .setName("featureSet") + .setVersion(1) + .build(); + + dfJobManager = Mockito.spy(dfJobManager); + + DataflowPipelineJob mockPipelineResult = Mockito.mock(DataflowPipelineJob.class); + when(mockPipelineResult.getState()).thenReturn(State.FAILED); + + doReturn(mockPipelineResult).when(dfJobManager).runPipeline(any()); + + expectedException.expect(JobExecutionException.class); + dfJobManager.startJob("job", Lists.newArrayList(featureSetSpec), store); } } diff --git a/core/src/test/java/feast/core/job/dataflow/DataflowJobMonitorTest.java b/core/src/test/java/feast/core/job/dataflow/DataflowJobMonitorTest.java index 3e1d3f89bf3..32cd663ae4b 100644 --- a/core/src/test/java/feast/core/job/dataflow/DataflowJobMonitorTest.java +++ b/core/src/test/java/feast/core/job/dataflow/DataflowJobMonitorTest.java @@ -28,9 +28,13 @@ import com.google.api.services.dataflow.Dataflow.Projects.Locations.Jobs; import com.google.api.services.dataflow.Dataflow.Projects.Locations.Jobs.Get; import com.google.api.services.dataflow.model.Job; +import com.google.common.collect.Lists; import feast.core.job.Runner; import feast.core.model.JobInfo; import feast.core.model.JobStatus; +import feast.types.FieldProto.Field; +import feast.types.ValueProto.BoolList; +import feast.types.ValueProto.Value; import 
java.io.IOException;
 import org.junit.Before;
 import org.junit.Test;
@@ -101,4 +105,14 @@ public void getJobStatus_shouldReturnUnknownStateWhenExceptionHappen() throws IO
     when(jobInfo.getRunner()).thenReturn(Runner.DATAFLOW.getName());
     assertThat(monitor.getJobStatus(jobInfo), equalTo(JobStatus.UNKNOWN));
   }
+
+  @Test
+  public void test() {
+    Field field = Field.newBuilder()
+        .setName("Hello")
+        .setValue(Value.newBuilder().setBoolListVal(BoolList.newBuilder().addAllVal(
+            Lists.newArrayList(true,false,true,true)).build()))
+        .build();
+    field.getName();
+  }
 }
\ No newline at end of file
diff --git a/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java b/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java
new file mode 100644
index 00000000000..2dbd53105e0
--- /dev/null
+++ b/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java
@@ -0,0 +1,114 @@
+package feast.core.job.direct;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+import static org.mockito.MockitoAnnotations.initMocks;
+
+import com.google.common.collect.Lists;
+import com.google.protobuf.util.JsonFormat;
+import com.google.protobuf.util.JsonFormat.Printer;
+import feast.core.FeatureSetProto.FeatureSetSpec;
+import feast.core.StoreProto;
+import feast.core.StoreProto.Store.RedisConfig;
+import feast.core.StoreProto.Store.StoreType;
+import feast.core.config.FeastProperties.MetricsProperties;
+import feast.ingestion.options.ImportOptions;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.beam.runners.direct.DirectRunner;
+import org.apache.beam.sdk.PipelineResult;
+import org.apache.beam.sdk.options.PipelineOptionsFactory;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+
+public class DirectRunnerJobManagerTest {
+  @Rule
+  public final ExpectedException expectedException = ExpectedException.none();
+
+  @Mock
+  private DirectJobRegistry directJobRegistry;
+
+  private DirectRunnerJobManager drJobManager;
+  private Map defaults;
+
+  @Before
+  public void setUp() {
+    initMocks(this);
+    defaults = new HashMap<>();
+    MetricsProperties metricsProperties = new MetricsProperties();
+    metricsProperties.setEnabled(false);
+
+    drJobManager = new DirectRunnerJobManager(defaults, directJobRegistry, metricsProperties);
+    drJobManager = Mockito.spy(drJobManager);
+  }
+
+  @Test
+  public void shouldStartDirectJobAndRegisterPipelineResult() throws IOException {
+    StoreProto.Store store = StoreProto.Store.newBuilder()
+        .setName("SERVING")
+        .setType(StoreType.REDIS)
+        .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379).build())
+        .build();
+
+    FeatureSetSpec featureSetSpec = FeatureSetSpec.newBuilder()
+        .setName("featureSet")
+        .setVersion(1)
+        .build();
+
+    Printer printer = JsonFormat.printer();
+
+    ImportOptions expectedPipelineOptions = PipelineOptionsFactory.fromArgs("")
+        .as(ImportOptions.class);
+    expectedPipelineOptions.setAppName("DirectRunnerJobManager");
+    expectedPipelineOptions.setRunner(DirectRunner.class);
+    expectedPipelineOptions.setBlockOnRun(false);
+    expectedPipelineOptions.setProject("");
+    expectedPipelineOptions.setStoreJson(Lists.newArrayList(printer.print(store)));
+    expectedPipelineOptions.setProject("");
+    expectedPipelineOptions
+        .setFeatureSetSpecJson(Lists.newArrayList(printer.print(featureSetSpec)));
+
+    String expectedJobId = "feast-job-0";
+    ArgumentCaptor pipelineOptionsCaptor = ArgumentCaptor
+        .forClass(ImportOptions.class);
+    ArgumentCaptor directJobCaptor = ArgumentCaptor
+        .forClass(DirectJob.class);
+
+    PipelineResult mockPipelineResult = Mockito.mock(PipelineResult.class);
+    doReturn(mockPipelineResult).when(drJobManager).runPipeline(any());
+
+    String jobId = drJobManager.startJob(expectedJobId, Lists.newArrayList(featureSetSpec), store);
+    verify(drJobManager, times(1)).runPipeline(pipelineOptionsCaptor.capture());
+    verify(directJobRegistry, times(1)).add(directJobCaptor.capture());
+
+    ImportOptions actualPipelineOptions = pipelineOptionsCaptor.getValue();
+    DirectJob jobStarted = directJobCaptor.getValue();
+    expectedPipelineOptions.setOptionsId(actualPipelineOptions.getOptionsId()); // avoid comparing this value
+
+    assertThat(actualPipelineOptions.toString(),
+        equalTo(expectedPipelineOptions.toString()));
+    assertThat(jobStarted.getPipelineResult(), equalTo(mockPipelineResult));
+    assertThat(jobStarted.getJobId(), equalTo(expectedJobId));
+    assertThat(jobId, equalTo(expectedJobId));
+  }
+
+  @Test
+  public void shouldAbortJobThenRemoveFromRegistry() throws IOException {
+    DirectJob job = Mockito.mock(DirectJob.class);
+    when(directJobRegistry.get("job")).thenReturn(job);
+    drJobManager.abortJob("job");
+    verify(job, times(1)).abort();
+    verify(directJobRegistry, times(1)).remove("job");
+  }
+}
\ No newline at end of file
diff --git a/core/src/test/java/feast/core/job/flink/FlinkJobManagerTest.java b/core/src/test/java/feast/core/job/flink/FlinkJobManagerTest.java
deleted file mode 100644
index 35faaf2782d..00000000000
--- a/core/src/test/java/feast/core/job/flink/FlinkJobManagerTest.java
+++ /dev/null
@@ -1,118 +0,0 @@
-package feast.core.job.flink;
-
-import static org.hamcrest.Matchers.equalTo;
-import static org.junit.Assert.assertThat;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-import feast.core.config.ImportJobDefaults;
-import feast.specs.ImportJobSpecsProto.ImportJobSpecs;
-import feast.specs.ImportSpecProto.ImportSpec;
-import java.io.IOException;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Collections;
-import org.apache.flink.client.cli.CliFrontend;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
-import org.mockito.ArgumentCaptor;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-
-public class FlinkJobManagerTest {
-
-  @Rule
-  public TemporaryFolder tempFolder = new TemporaryFolder();
-  @Mock
-  private CliFrontend flinkCli;
-  @Mock
-  private FlinkRestApi flinkRestApi;
-  private FlinkJobConfig config;
-  private ImportJobDefaults defaults;
-  private FlinkJobManager flinkJobManager;
-  private Path workspace;
-
-  @Before
-  public void setUp() throws Exception {
-    MockitoAnnotations.initMocks(this);
-    config = new FlinkJobConfig("localhost:8081", "/etc/flink/conf");
-    workspace = Paths.get(tempFolder.newFolder().toString());
-    defaults =
-        ImportJobDefaults.builder().
- runner("FlinkRunner").importJobOptions("{\"key\":\"value\"}") - .executable("ingestion.jar") - .workspace(workspace.toString()).build(); - - flinkJobManager = new FlinkJobManager(flinkCli, config, flinkRestApi, defaults); - } - - @Test - public void shouldPassCorrectArgumentForSubmittingJob() throws IOException { - FlinkJobList response = new FlinkJobList(); - response.setJobs(Collections.singletonList(new FlinkJob("1234", "job1", "RUNNING"))); - when(flinkRestApi.getJobsOverview()).thenReturn(response); - - ImportSpec importSpec = ImportSpec.newBuilder().setType("file").build(); - String jobName = "importjob"; - ImportJobSpecs importJobSpecs = ImportJobSpecs.newBuilder().setJobId(jobName) - .setImportSpec(importSpec).build(); - - flinkJobManager.submitJob(importJobSpecs, Paths.get("/tmp/foobar")); - String[] expected = - new String[]{ - "run", - "-d", - "-m", - config.getMasterUrl(), - defaults.getExecutable(), - "--jobName=" + jobName, - "--runner=FlinkRunner", - "--workspace=/tmp/foobar", - "--key=value" - }; - - ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(String[].class); - verify(flinkCli).parseParameters(argumentCaptor.capture()); - - String[] actual = argumentCaptor.getValue(); - assertThat(actual, equalTo(expected)); - } - - @Test - public void shouldReturnFlinkJobId() { - FlinkJobList response = new FlinkJobList(); - String flinkJobId = "1234"; - String jobName = "importjob"; - response.setJobs(Collections.singletonList(new FlinkJob(flinkJobId, jobName, "RUNNING"))); - when(flinkRestApi.getJobsOverview()).thenReturn(response); - - ImportSpec importSpec = ImportSpec.newBuilder().setType("file").build(); - ImportJobSpecs importJobSpecs = ImportJobSpecs.newBuilder().setJobId(jobName) - .setImportSpec(importSpec).build(); - String jobId = flinkJobManager.submitJob(importJobSpecs, workspace); - - assertThat(jobId, equalTo(flinkJobId)); - } - - @Test - public void shouldPassCorrectArgumentForStoppingJob() { - String jobId = "1234"; - - flinkJobManager.abortJob(jobId); - - String[] expected = new String[]{ - "cancel", - "-m", - config.getMasterUrl(), - jobId - }; - - ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(String[].class); - verify(flinkCli).parseParameters(argumentCaptor.capture()); - - String[] actual = argumentCaptor.getValue(); - assertThat(actual, equalTo(expected)); - } -} diff --git a/core/src/test/java/feast/core/job/flink/FlinkRestApiTest.java b/core/src/test/java/feast/core/job/flink/FlinkRestApiTest.java deleted file mode 100644 index 023791d2ee9..00000000000 --- a/core/src/test/java/feast/core/job/flink/FlinkRestApiTest.java +++ /dev/null @@ -1,113 +0,0 @@ -package feast.core.job.flink; - -import static org.hamcrest.core.IsEqual.equalTo; -import static org.junit.Assert.assertThat; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import java.util.Arrays; -import okhttp3.HttpUrl; -import okhttp3.mockwebserver.MockResponse; -import okhttp3.mockwebserver.MockWebServer; -import okhttp3.mockwebserver.RecordedRequest; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.web.client.RestTemplate; - -public class FlinkRestApiTest { - FlinkRestApi flinkRestApi; - MockWebServer mockWebServer; - - String host; - int port; - - @Before - public void setUp() throws Exception { - mockWebServer = new MockWebServer(); - mockWebServer.start(); - - port = mockWebServer.getPort(); - host = mockWebServer.getHostName(); - - flinkRestApi = new 
FlinkRestApi(new RestTemplate(), String.format("%s:%d", host, port)); - } - - @Test - public void shouldSendCorrectRequest() throws InterruptedException { - MockResponse response = new MockResponse(); - response.setResponseCode(200); - mockWebServer.enqueue(response); - - flinkRestApi.getJobsOverview(); - - RecordedRequest recordedRequest = mockWebServer.takeRequest(); - HttpUrl requestUrl = recordedRequest.getRequestUrl(); - - assertThat(requestUrl.host(), equalTo(host)); - assertThat(requestUrl.port(), equalTo(port)); - assertThat(requestUrl.encodedPath(), equalTo("/jobs/overview")); - } - - @Test - public void shouldReturnEmptyJobListForEmptyBody() { - MockResponse response = new MockResponse(); - response.setResponseCode(200); - mockWebServer.enqueue(response); - - FlinkJobList jobList = flinkRestApi.getJobsOverview(); - assertThat(jobList.getJobs().size(), equalTo(0)); - } - - @Test - public void shouldReturnEmptyJobListForEmptyJsonResponse() { - mockWebServer.enqueue(createMockResponse(200, "[]")); - - FlinkJobList jobList = flinkRestApi.getJobsOverview(); - assertThat(jobList.getJobs().size(), equalTo(0)); - - mockWebServer.enqueue(createMockResponse(200, "{}")); - - jobList = flinkRestApi.getJobsOverview(); - assertThat(jobList.getJobs().size(), equalTo(0)); - - mockWebServer.enqueue(createMockResponse(200, "{jobs: []}")); - - jobList = flinkRestApi.getJobsOverview(); - assertThat(jobList.getJobs().size(), equalTo(0)); - } - - @Test - public void shouldReturnCorrectResultForValidResponse() throws JsonProcessingException { - FlinkJobList jobList = new FlinkJobList(); - FlinkJob job1 = new FlinkJob("1234", "job1", "RUNNING"); - FlinkJob job2 = new FlinkJob("5678", "job2", "RUNNING"); - FlinkJob job3 = new FlinkJob("1111", "job3", "RUNNING"); - - jobList.setJobs(Arrays.asList(job1, job2, job3)); - - mockWebServer.enqueue( createMockResponse(200, createResponseBody(jobList))); - - FlinkJobList actual = flinkRestApi.getJobsOverview(); - - assertThat(actual.getJobs().size(), equalTo(3)); - } - - @After - public void tearDown() throws Exception { - mockWebServer.shutdown(); - } - - private String createResponseBody(FlinkJobList jobList) throws JsonProcessingException { - ObjectMapper objectMapper = new ObjectMapper(); - return objectMapper.writeValueAsString(jobList); - } - - private MockResponse createMockResponse(int statusCode, String body) { - MockResponse response = new MockResponse(); - response.setHeader("Content-Type", "application/json"); - response.setResponseCode(statusCode); - response.setBody(body); - return response; - } -} diff --git a/core/src/test/java/feast/core/model/EntityInfoTest.java b/core/src/test/java/feast/core/model/EntityInfoTest.java deleted file mode 100644 index 9554c577016..00000000000 --- a/core/src/test/java/feast/core/model/EntityInfoTest.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.core.model; - -import com.google.api.client.util.Lists; -import com.google.protobuf.Timestamp; -import org.junit.Before; -import org.junit.Test; -import feast.core.UIServiceProto.UIServiceTypes.EntityDetail; -import feast.specs.EntitySpecProto.EntitySpec; - -import java.time.Instant; -import java.util.Date; - -import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; - -public class EntityInfoTest { - private EntityInfo entityInfo; - private EntitySpec entitySpec; - - @Before - public void setUp() { - entityInfo = new EntityInfo(); - entityInfo.setName("test"); - entityInfo.setDescription("description"); - entityInfo.setTags("tag1,tag2"); - - entitySpec = - EntitySpec.newBuilder() - .setName("test") - .setDescription("description") - .addTags("tag1") - .addTags("tag2") - .build(); - } - - @Test - public void shouldBuildAndReturnCorrespondingSpec() { - assertThat(entityInfo.getEntitySpec(), equalTo(entitySpec)); - } - - @Test - public void shouldCorrectlyInitialiseFromGivenSpec() { - assertThat(new EntityInfo(entitySpec), equalTo(entityInfo)); - } - - @Test - public void shouldBuildAndReturnCorrespondingDetail() { - entityInfo.setLastUpdated(new Date(1000)); - Timestamp ts = Timestamp.newBuilder().setSeconds(1).build(); - EntityDetail expected = - EntityDetail.newBuilder().setSpec(entitySpec).setLastUpdated(ts).build(); - assertThat(entityInfo.getEntityDetail(), equalTo(expected)); - } - - @Test - public void shouldUpdateTagAndDescription() { - EntityInfo entityInfo = new EntityInfo("entity", "test entity", "tag1,tag2", Lists.newArrayList(), false); - EntitySpec update = EntitySpec.newBuilder().setName("entity").setDescription("overwrite").addTags("newtag").build(); - EntityInfo expected = new EntityInfo("entity", "overwrite", "newtag", Lists.newArrayList(), false); - entityInfo.update(update); - assertThat(entityInfo, equalTo(expected)); - } -} diff --git a/core/src/test/java/feast/core/model/FeatureGroupInfoTest.java b/core/src/test/java/feast/core/model/FeatureGroupInfoTest.java deleted file mode 100644 index b9f8d007c58..00000000000 --- a/core/src/test/java/feast/core/model/FeatureGroupInfoTest.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.core.model; - -import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; - -import com.google.protobuf.Timestamp; -import feast.core.UIServiceProto.UIServiceTypes.FeatureGroupDetail; -import feast.specs.FeatureGroupSpecProto.FeatureGroupSpec; -import java.util.Date; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -public class FeatureGroupInfoTest { - - @Rule - public final ExpectedException exception = ExpectedException.none(); - private FeatureGroupInfo featureGroupInfo; - private FeatureGroupSpec featureGroupSpec; - private StorageInfo servingStorage; - private StorageInfo warehouseStorage; - - @Before - public void setUp() { - servingStorage = new StorageInfo(); - servingStorage.setId("REDIS1"); - - warehouseStorage = new StorageInfo(); - warehouseStorage.setId("REDIS2"); - - featureGroupInfo = new FeatureGroupInfo(); - featureGroupInfo.setId("test"); - featureGroupInfo.setTags("tag1,tag2"); - - featureGroupInfo.setOptions("{\"foo\":\"bar\"}"); - featureGroupSpec = - FeatureGroupSpec.newBuilder() - .setId("test") - .addTags("tag1") - .addTags("tag2") - .putOptions("foo", "bar") - .build(); - } - - @Test - public void shouldBuildAndReturnCorrespondingSpec() { - assertThat(featureGroupInfo.getFeatureGroupSpec(), equalTo(featureGroupSpec)); - } - - @Test - public void shouldCorrectlyInitialiseFromGivenSpec() { - assertThat( - new FeatureGroupInfo(featureGroupSpec), - equalTo(featureGroupInfo)); - } - - @Test - public void shouldBuildAndReturnCorrespondingDetail() { - featureGroupInfo.setLastUpdated(new Date(1000)); - Timestamp ts = Timestamp.newBuilder().setSeconds(1).build(); - FeatureGroupDetail expected = - FeatureGroupDetail.newBuilder().setSpec(featureGroupSpec).setLastUpdated(ts).build(); - assertThat(featureGroupInfo.getFeatureGroupDetail(), equalTo(expected)); - } - - @Test - public void shouldUpdateTags() { - FeatureGroupSpec update = - FeatureGroupSpec.newBuilder() - .setId("test") - .addTags("newtag") - .putOptions("foo", "bar") - .build(); - featureGroupInfo.update(update); - - FeatureGroupInfo expected = new FeatureGroupInfo(update); - assertThat(featureGroupInfo, equalTo(expected)); - } - - @Test - public void shouldThrowErrorIfOptionsChanged() { - FeatureGroupSpec update = - FeatureGroupSpec.newBuilder() - .setId("test") - .addTags("newtag") - .putOptions("new", "option") - .build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage("Feature group already exists. Update only allowed for fields: [tags]"); - featureGroupInfo.update(update); - } -} diff --git a/core/src/test/java/feast/core/model/FeatureInfoTest.java b/core/src/test/java/feast/core/model/FeatureInfoTest.java deleted file mode 100644 index 5b173bd6f21..00000000000 --- a/core/src/test/java/feast/core/model/FeatureInfoTest.java +++ /dev/null @@ -1,228 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.core.model; - -import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; - -import com.google.protobuf.Timestamp; -import feast.core.UIServiceProto.UIServiceTypes.FeatureDetail; -import feast.core.config.StorageConfig.StorageSpecs; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.types.ValueProto.ValueType; -import java.util.Date; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -public class FeatureInfoTest { - - @Rule - public final ExpectedException exception = ExpectedException.none(); - private FeatureInfo featureInfo; - private FeatureSpec featureSpec; - private EntityInfo entityInfo; - private StorageInfo servingStorage; - private StorageInfo warehouseStorage; - - @Before - public void setUp() { - entityInfo = new EntityInfo(); - entityInfo.setName("entity"); - - featureInfo = new FeatureInfo(); - featureInfo.setId("entity.name"); - featureInfo.setName("name"); - featureInfo.setOwner("owner"); - featureInfo.setDescription("desc"); - featureInfo.setUri("uri"); - featureInfo.setValueType(ValueType.Enum.BYTES); - featureInfo.setEntity(entityInfo); - featureInfo.setOptions("{}"); - featureInfo.setTags("tag1,tag2"); - - servingStorage = new StorageInfo(); - servingStorage.setId("REDIS1"); - - warehouseStorage = new StorageInfo(); - warehouseStorage.setId("BIGQUERY"); - warehouseStorage.setType("bigquery"); - - featureSpec = - FeatureSpec.newBuilder() - .setId("entity.name") - .setName("name") - .setOwner("owner") - .setDescription("desc") - .setEntity("entity") - .setUri("uri") - .setValueType(ValueType.Enum.BYTES) - .addTags("tag1") - .addTags("tag2") - .build(); - } - - @Test - public void shouldBuildAndReturnCorrespondingSpec() { - assertThat(featureInfo.getFeatureSpec(), equalTo(featureSpec)); - } - - @Test - public void shouldCorrectlyInitialiseFromGivenSpec() { - assertThat( - new FeatureInfo(featureSpec, entityInfo, null), - equalTo(featureInfo)); - } - - @Test - public void shouldBuildAndReturnCorrespondingDetail() { - featureInfo.setLastUpdated(new Date(1000)); - featureInfo.setCreated(new Date(1000)); - featureInfo.setBigQueryView("bqviewurl"); - Timestamp ts = Timestamp.newBuilder().setSeconds(1).build(); - FeatureDetail expected = - FeatureDetail.newBuilder() - .setSpec(featureSpec) - .setBigqueryView("bqviewurl") - .setEnabled(true) - .setLastUpdated(ts) - .setCreated(ts) - .build(); - assertThat(featureInfo.getFeatureDetail(StorageSpecs.builder().build()), equalTo(expected)); - } - - @Test - public void shouldBuildCorrespondingResolvedSpec() { - FeatureGroupInfo featureGroupInfo = new FeatureGroupInfo(); - featureGroupInfo.setId("testGroup"); - featureGroupInfo.setTags("inherited"); - FeatureInfo featureInfo = new FeatureInfo(); - featureInfo.setId("entity.name"); - featureInfo.setName("name"); - featureInfo.setOwner("owner"); - featureInfo.setDescription("desc"); - featureInfo.setUri("uri"); - featureInfo.setValueType(ValueType.Enum.BYTES); - featureInfo.setEntity(entityInfo); - featureInfo.setOptions("{}"); - featureInfo.setTags("tag1,tag2"); - featureInfo.setFeatureGroup(featureGroupInfo); - - FeatureSpec expected = - FeatureSpec.newBuilder() - .setId("entity.name") - .setName("name") - .setOwner("owner") - .setDescription("desc") - .setEntity("entity") - .setUri("uri") - .setGroup("testGroup") - .setValueType(ValueType.Enum.BYTES) - 
.addTags("tag1") - .addTags("tag2") - .addTags("inherited") - .build(); - FeatureInfo resolved = featureInfo.resolve(); - assertThat(resolved.getFeatureSpec(), equalTo(expected)); - } - - @Test - public void shouldUpdateMutableFields() { - FeatureSpec update = - FeatureSpec.newBuilder() - .setId("entity.name") - .setName("name") - .setOwner("owner2") - .setDescription("overwrite") - .setEntity("entity") - .setUri("new_uri") - .setValueType(ValueType.Enum.BYTES) - .addTags("new_tag") - .build(); - featureInfo.update(featureSpec); - FeatureInfo expected = - new FeatureInfo(update, entityInfo, null); - assertThat(featureInfo, equalTo(expected)); - } - - @Test - public void shouldThrowExceptionIfImmutableFieldsChanged() { - FeatureSpec update = - FeatureSpec.newBuilder() - .setId("entity.name") - .setName("name") - .setOwner("owner2") - .setDescription("overwrite") - .setEntity("entity") - .setUri("new_uri") - .setValueType(ValueType.Enum.INT32) - .addTags("new_tag") - .build(); - - exception.expect(IllegalArgumentException.class); - exception.expectMessage( - "Feature already exists. Update only allowed for fields: [owner, description, uri, tags]"); - featureInfo.update(update); - } - - - @Test - public void shouldThrowExceptionIfImmutableFieldsChangedToNull() { - FeatureSpec update = - FeatureSpec.newBuilder() - .setId("entity.name") - .setName("name") - .setOwner("owner2") - .setDescription("overwrite") - .setEntity("entity") - .setUri("new_uri") - //.setValueType() - .addTags("new_tag") - .build(); - - exception.expect(IllegalArgumentException.class); - exception.expectMessage( - "Feature already exists. Update only allowed for fields: [owner, description, uri, tags]"); - featureInfo.update(update); - } - - @Test - public void createBigQueryLink_withBigQueryType_shouldGenerateLink() { - String link = featureInfo.createBigqueryViewLink(StorageSpec.newBuilder() - .setType("bigquery").setId("BQ").putOptions("project", "project1") - .putOptions("dataset", "dataset1").build()); - assertEquals(link, - "https://bigquery.cloud.google.com/table/project1:dataset1.entity_view"); - } - - @Test - public void createBigQueryLink_withOtherType_shouldNotGenerateLink() { - String link = featureInfo.createBigqueryViewLink(StorageSpec.newBuilder() - .setType("another_type").build()); - assertEquals(link, "N.A."); - } - - @Test - public void createBigQueryLink_withNullSpec_shouldNotGenerateLink() { - String link = featureInfo.createBigqueryViewLink(null); - assertEquals(link, "N.A."); - } -} diff --git a/core/src/test/java/feast/core/model/JobInfoTest.java b/core/src/test/java/feast/core/model/JobInfoTest.java deleted file mode 100644 index f47d8435f83..00000000000 --- a/core/src/test/java/feast/core/model/JobInfoTest.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.core.model; - -import com.google.protobuf.InvalidProtocolBufferException; -import com.google.protobuf.util.JsonFormat; -import feast.core.util.TypeConversion; -import feast.specs.ImportSpecProto; -import org.junit.Test; - -import java.util.ArrayList; -import java.util.List; - -import static org.hamcrest.CoreMatchers.equalTo; -import static org.junit.Assert.assertThat; - -public class JobInfoTest { - @Test - public void shouldInitialiseGivenJobIdAndSpec() throws InvalidProtocolBufferException { - ImportSpecProto.Schema schema = ImportSpecProto.Schema.newBuilder() - .setEntityIdColumn("entity") - .setTimestampColumn("timestamp") - .addFields(ImportSpecProto.Field.newBuilder().setName("entity").build()) - .addFields(ImportSpecProto.Field.newBuilder().setName("timestamp").build()) - .addFields(ImportSpecProto.Field.newBuilder().setName("feature").setFeatureId("feature").build()) - .build(); - - ImportSpecProto.ImportSpec importSpec = ImportSpecProto.ImportSpec.newBuilder() - .setType("file.csv") - .putSourceOptions("path", "gs://some/path") - .addEntities("entity") - .setSchema(schema) - .build(); - - JobInfo actual = new JobInfo("fake-job-id", "fake-ext-id", "DataflowRunner",importSpec, JobStatus.PENDING); - JobInfo expected = new JobInfo(); - expected.setId("fake-job-id"); - expected.setExtId("fake-ext-id"); - expected.setType("file.csv"); - expected.setRunner("DataflowRunner"); - expected.setSourceOptions(TypeConversion.convertMapToJsonString(importSpec.getSourceOptionsMap())); - - List entities = new ArrayList<>(); - EntityInfo entityInfo = new EntityInfo(); - entityInfo.setName("entity"); - entities.add(entityInfo); - expected.setEntities(entities); - - List features = new ArrayList<>(); - FeatureInfo featureInfo = new FeatureInfo(); - featureInfo.setName("feature"); - features.add(featureInfo); - expected.setFeatures(features); - - expected.setRaw(JsonFormat.printer().print(importSpec)); - assertThat(actual.getId(), equalTo(expected.getId())); - assertThat(actual.getExtId(), equalTo(expected.getExtId())); - assertThat(actual.getType(), equalTo(expected.getType())); - assertThat(actual.getRunner(), equalTo(expected.getRunner())); - assertThat(actual.getEntities(), equalTo(expected.getEntities())); - assertThat(actual.getFeatures(), equalTo(expected.getFeatures())); - assertThat(actual.getSourceOptions(), equalTo(expected.getSourceOptions())); - assertThat(actual.getRaw(), equalTo(expected.getRaw())); - } -} \ No newline at end of file diff --git a/core/src/test/java/feast/core/model/StorageInfoTest.java b/core/src/test/java/feast/core/model/StorageInfoTest.java deleted file mode 100644 index 7c4e7134411..00000000000 --- a/core/src/test/java/feast/core/model/StorageInfoTest.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.core.model; - -import com.google.protobuf.Timestamp; -import feast.core.UIServiceProto.UIServiceTypes.StorageDetail; -import feast.specs.StorageSpecProto.StorageSpec; -import org.junit.Before; -import org.junit.Test; - -import java.time.Instant; -import java.util.Date; - -import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; - -public class StorageInfoTest { - private StorageInfo storageInfo; - private StorageSpec storageSpec; - - @Before - public void setUp() { - storageInfo = new StorageInfo(); - storageInfo.setId("REDIS1"); - storageInfo.setType("redis"); - storageInfo.setOptions("{\"option1\":\"value\"}"); - - storageSpec = - StorageSpec.newBuilder() - .setId("REDIS1") - .setType("redis") - .putOptions("option1", "value") - .build(); - } - - @Test - public void shouldBuildAndReturnCorrespondingSpec() { - assertThat(storageInfo.getStorageSpec(), equalTo(storageSpec)); - } - - @Test - public void shouldCorrectlyInitialiseFromGivenSpec() { - assertThat(new StorageInfo(storageSpec), equalTo(storageInfo)); - } - - @Test - public void shouldBuildAndReturnCorrespondingDetail() { - storageInfo.setLastUpdated(new Date(1000)); - Timestamp ts = Timestamp.newBuilder().setSeconds(1).build(); - StorageDetail expected = - StorageDetail.newBuilder().setSpec(storageSpec).setLastUpdated(ts).build(); - assertThat(storageInfo.getStorageDetail(), equalTo(expected)); - } -} diff --git a/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java b/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java new file mode 100644 index 00000000000..47d3e8486da --- /dev/null +++ b/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java @@ -0,0 +1,152 @@ +package feast.core.service; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; +import static org.mockito.MockitoAnnotations.initMocks; + +import com.google.common.collect.Lists; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.SourceProto; +import feast.core.SourceProto.KafkaSourceConfig; +import feast.core.SourceProto.SourceType; +import feast.core.StoreProto; +import feast.core.StoreProto.Store.RedisConfig; +import feast.core.StoreProto.Store.StoreType; +import feast.core.dao.JobInfoRepository; +import feast.core.job.JobManager; +import feast.core.job.Runner; +import feast.core.model.FeatureSet; +import feast.core.model.JobInfo; +import feast.core.model.JobStatus; +import feast.core.model.Source; +import feast.core.model.Store; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.mockito.Mock; + +public class JobCoordinatorServiceTest { + + @Rule + public final ExpectedException exception = ExpectedException.none(); + @Mock + JobInfoRepository jobInfoRepository; + @Mock + JobManager jobManager; + + private JobCoordinatorService jobCoordinatorService; + private JobInfo existingJob; + private Source defaultSource; + + @Before + public void setUp() { + initMocks(this); + + Store store = Store.fromProto(StoreProto.Store.newBuilder() + .setName("SERVING") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) + .build()); + defaultSource = new Source(SourceType.KAFKA, + KafkaSourceConfig.newBuilder().setBootstrapServers("kafka:9092").setTopic("feast-topic") + .build(), true); + FeatureSet 
featureSet1 = new FeatureSet(); + featureSet1.setId("featureSet1:1"); + featureSet1.setSource(defaultSource); + FeatureSet featureSet2 = new FeatureSet(); + featureSet2.setId("featureSet2:1"); + featureSet2.setSource(defaultSource); + existingJob = new JobInfo("extid", "name", "DirectRunner", defaultSource, store, + Lists.newArrayList(featureSet1, featureSet2), Lists.newArrayList(), + JobStatus.RUNNING); + when(jobInfoRepository.findBySourceIdAndStoreName(defaultSource.getId(), "SERVING")) + .thenReturn(Lists.newArrayList(existingJob)); + + jobCoordinatorService = new JobCoordinatorService(jobInfoRepository, jobManager); + jobCoordinatorService = spy(jobCoordinatorService); + } + + @Test + public void shouldNotStartOrUpdateJobIfNoChanges() { + FeatureSetSpec featureSet1 = FeatureSetSpec.newBuilder() + .setName("featureSet1") + .setVersion(1) + .setSource(defaultSource.toProto()) + .build(); + FeatureSetSpec featureSet2 = FeatureSetSpec.newBuilder() + .setName("featureSet2") + .setVersion(1) + .setSource(defaultSource.toProto()) + .build(); + StoreProto.Store store = StoreProto.Store.newBuilder() + .setName("SERVING") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) + .build(); + JobInfo jobInfo = jobCoordinatorService + .startOrUpdateJob(Lists.newArrayList(featureSet1, featureSet2), + defaultSource.toProto(), store); + assertThat(jobInfo, equalTo(existingJob)); + } + + @Test + public void shouldStartJobIfNotExists() { + FeatureSetSpec featureSet = FeatureSetSpec.newBuilder() + .setName("featureSet") + .setVersion(1) + .setSource(defaultSource.toProto()) + .build(); + StoreProto.Store store = StoreProto.Store.newBuilder() + .setName("SERVING") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) + .build(); + String jobId = "featureSet-to-SERVING"; + String extJobId = "extId123"; + when(jobCoordinatorService.createJobId("featureSet", "SERVING")) + .thenReturn(jobId); + when(jobManager.startJob(jobId, Lists.newArrayList(featureSet), store)) + .thenReturn(extJobId); + when(jobManager.getRunnerType()).thenReturn(Runner.DIRECT); + FeatureSet expectedFeatureSet = new FeatureSet(); + expectedFeatureSet.setId("featureSet:1"); + JobInfo expectedJobInfo = new JobInfo(jobId, extJobId, "DirectRunner", + defaultSource, Store.fromProto(store), Lists.newArrayList(expectedFeatureSet), + JobStatus.RUNNING); + when(jobInfoRepository.save(expectedJobInfo)).thenReturn(expectedJobInfo); + JobInfo jobInfo = jobCoordinatorService + .startOrUpdateJob(Lists.newArrayList(featureSet), defaultSource.toProto(), + store); + assertThat(jobInfo, equalTo(expectedJobInfo)); + } + + @Test + public void shouldUpdateJobIfAlreadyExistsButThereIsAChange() { + FeatureSetSpec featureSet = FeatureSetSpec.newBuilder() + .setName("featureSet1") + .setVersion(1) + .setSource(defaultSource.toProto()) + .build(); + StoreProto.Store store = StoreProto.Store.newBuilder() + .setName("SERVING") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) + .build(); + String extId = "extId123"; + JobInfo modifiedJob = new JobInfo(existingJob.getId(), existingJob.getExtId(), + existingJob.getRunner(), defaultSource, Store.fromProto(store), + Lists.newArrayList(FeatureSet.fromProto(featureSet)), JobStatus.RUNNING); + when(jobManager.updateJob(modifiedJob)).thenReturn(extId); + JobInfo expectedJobInfo = modifiedJob; + expectedJobInfo.setExtId(extId); + 
when(jobInfoRepository.save(expectedJobInfo)).thenReturn(expectedJobInfo); + JobInfo jobInfo = jobCoordinatorService + .startOrUpdateJob(Lists.newArrayList(featureSet), defaultSource.toProto(), + store); + assertThat(jobInfo, equalTo(expectedJobInfo)); + } + +} \ No newline at end of file diff --git a/core/src/test/java/feast/core/service/JobManagementServiceTest.java b/core/src/test/java/feast/core/service/JobManagementServiceTest.java deleted file mode 100644 index 71967f8da88..00000000000 --- a/core/src/test/java/feast/core/service/JobManagementServiceTest.java +++ /dev/null @@ -1,241 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.service; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.equalTo; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.mockito.MockitoAnnotations.initMocks; - -import com.google.common.collect.Lists; -import com.google.protobuf.Timestamp; -import feast.core.JobServiceProto.JobServiceTypes.JobDetail; -import feast.core.config.ImportJobDefaults; -import feast.core.config.StorageConfig.StorageSpecs; -import feast.core.dao.JobInfoRepository; -import feast.core.dao.MetricsRepository; -import feast.core.exception.RetrievalException; -import feast.core.job.JobManager; -import feast.core.model.JobInfo; -import feast.core.model.JobStatus; -import feast.specs.StorageSpecProto.StorageSpec; -import java.time.Instant; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.Optional; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.mockito.ArgumentCaptor; -import org.mockito.Mock; - -public class JobManagementServiceTest { - - @Rule - public final ExpectedException exception = ExpectedException.none(); - @Mock - private JobInfoRepository jobInfoRepository; - @Mock - private MetricsRepository metricsRepository; - @Mock - private JobManager jobManager; - private ImportJobDefaults defaults; - @Mock - private SpecService specService; - private StorageSpecs storageSpecs; - - @Before - public void setUp() { - initMocks(this); - defaults = - ImportJobDefaults.builder() - .runner("DirectRunner").executable("/feast-import.jar").build(); - storageSpecs = StorageSpecs.builder() - .errorsStorageSpec(StorageSpec.newBuilder().setType("stderr").build()).build(); - } - - @Test - public void shouldListAllJobDetails() { - JobInfo jobInfo1 = - new JobInfo( - "job1", - "", - "", - "", - "", - "", - Collections.emptyList(), - Collections.emptyList(), - Collections.emptyList(), - JobStatus.PENDING, - ""); - jobInfo1.setCreated(Date.from(Instant.ofEpochSecond(1))); - jobInfo1.setLastUpdated(Date.from(Instant.ofEpochSecond(1))); - JobInfo jobInfo2 = - new JobInfo( - "job2", - "", - "", - "", - "", - "", - Collections.emptyList(), - Collections.emptyList(), 
- Collections.emptyList(), - JobStatus.PENDING, - ""); - jobInfo2.setCreated(Date.from(Instant.ofEpochSecond(1))); - jobInfo2.setLastUpdated(Date.from(Instant.ofEpochSecond(1))); - when(jobInfoRepository.findAll()).thenReturn(Lists.newArrayList(jobInfo1, jobInfo2)); - JobManagementService jobManagementService = - new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, - specService, storageSpecs); - List actual = jobManagementService.listJobs(); - List expected = - Lists.newArrayList( - JobDetail.newBuilder() - .setId("job1") - .setStatus("PENDING") - .setCreated(Timestamp.newBuilder().setSeconds(1).build()) - .setLastUpdated(Timestamp.newBuilder().setSeconds(1).build()) - .build(), - JobDetail.newBuilder() - .setId("job2") - .setStatus("PENDING") - .setCreated(Timestamp.newBuilder().setSeconds(1).build()) - .setLastUpdated(Timestamp.newBuilder().setSeconds(1).build()) - .build()); - assertThat(actual, equalTo(expected)); - } - - @Test - public void shouldReturnDetailOfRequestedJobId() { - JobInfo jobInfo1 = - new JobInfo( - "job1", - "", - "", - "", - "", - "", - Collections.emptyList(), - Collections.emptyList(), - Collections.emptyList(), - JobStatus.PENDING, - ""); - jobInfo1.setCreated(Date.from(Instant.ofEpochSecond(1))); - jobInfo1.setLastUpdated(Date.from(Instant.ofEpochSecond(1))); - when(jobInfoRepository.findById("job1")).thenReturn(Optional.of(jobInfo1)); - JobManagementService jobManagementService = - new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, - specService, storageSpecs); - JobDetail actual = jobManagementService.getJob("job1"); - JobDetail expected = - JobDetail.newBuilder() - .setId("job1") - .setStatus("PENDING") - .setCreated(Timestamp.newBuilder().setSeconds(1).build()) - .setLastUpdated(Timestamp.newBuilder().setSeconds(1).build()) - .build(); - assertThat(actual, equalTo(expected)); - } - - @Test - public void shouldThrowErrorIfJobIdNotFoundWhenGettingJob() { - when(jobInfoRepository.findById("job1")).thenReturn(Optional.empty()); - JobManagementService jobManagementService = - new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, specService, storageSpecs); - exception.expect(RetrievalException.class); - exception.expectMessage("Unable to retrieve job with id job1"); - jobManagementService.getJob("job1"); - } - - @Test - public void shouldThrowErrorIfJobIdNotFoundWhenAbortingJob() { - when(jobInfoRepository.findById("job1")).thenReturn(Optional.empty()); - JobManagementService jobManagementService = - new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, specService, storageSpecs); - exception.expect(RetrievalException.class); - exception.expectMessage("Unable to retrieve job with id job1"); - jobManagementService.abortJob("job1"); - } - - @Test - public void shouldThrowErrorIfJobInTerminalStateWhenAbortingJob() { - JobInfo job = new JobInfo(); - job.setStatus(JobStatus.COMPLETED); - when(jobInfoRepository.findById("job1")).thenReturn(Optional.of(job)); - JobManagementService jobManagementService = - new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, specService, storageSpecs); - exception.expect(IllegalStateException.class); - exception.expectMessage("Unable to stop job already in terminal state"); - jobManagementService.abortJob("job1"); - } - - @Test - public void shouldUpdateJobAfterAborting() { - JobInfo job = new JobInfo(); - job.setStatus(JobStatus.RUNNING); - job.setExtId("extId1"); - 
when(jobInfoRepository.findById("job1")).thenReturn(Optional.of(job)); - JobManagementService jobManagementService = - new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, specService, storageSpecs); - jobManagementService.abortJob("job1"); - ArgumentCaptor jobCapture = ArgumentCaptor.forClass(JobInfo.class); - verify(jobInfoRepository).saveAndFlush(jobCapture.capture()); - assertThat(jobCapture.getValue().getStatus(), equalTo(JobStatus.ABORTING)); - } - - @Test - public void shouldUpdateJobStatusIfExists() { - JobInfo jobInfo = new JobInfo(); - when(jobInfoRepository.findById("jobid")).thenReturn(Optional.of(jobInfo)); - - ArgumentCaptor jobInfoArgumentCaptor = ArgumentCaptor.forClass(JobInfo.class); - JobManagementService jobExecutionService = - new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, specService, storageSpecs); - jobExecutionService.updateJobStatus("jobid", JobStatus.PENDING); - - verify(jobInfoRepository, times(1)).save(jobInfoArgumentCaptor.capture()); - - JobInfo jobInfoUpdated = new JobInfo(); - jobInfoUpdated.setStatus(JobStatus.PENDING); - assertThat(jobInfoArgumentCaptor.getValue(), equalTo(jobInfoUpdated)); - } - - @Test - public void shouldUpdateJobExtIdIfExists() { - JobInfo jobInfo = new JobInfo(); - when(jobInfoRepository.findById("jobid")).thenReturn(Optional.of(jobInfo)); - - ArgumentCaptor jobInfoArgumentCaptor = ArgumentCaptor.forClass(JobInfo.class); - JobManagementService jobExecutionService = - new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, specService, storageSpecs); - jobExecutionService.updateJobExtId("jobid", "extid"); - - verify(jobInfoRepository, times(1)).save(jobInfoArgumentCaptor.capture()); - - JobInfo jobInfoUpdated = new JobInfo(); - jobInfoUpdated.setExtId("extid"); - assertThat(jobInfoArgumentCaptor.getValue(), equalTo(jobInfoUpdated)); - } -} diff --git a/core/src/test/java/feast/core/service/SpecServiceTest.java b/core/src/test/java/feast/core/service/SpecServiceTest.java index f18e391debc..85f6a4689ce 100644 --- a/core/src/test/java/feast/core/service/SpecServiceTest.java +++ b/core/src/test/java/feast/core/service/SpecServiceTest.java @@ -17,408 +17,390 @@ package feast.core.service; -import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; -import static org.mockito.ArgumentMatchers.any; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.IsEqual.equalTo; +import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; -import com.google.common.base.Strings; -import com.google.common.collect.Lists; -import feast.core.config.StorageConfig.StorageSpecs; -import feast.core.dao.EntityInfoRepository; -import feast.core.dao.FeatureGroupInfoRepository; -import feast.core.dao.FeatureInfoRepository; +import com.google.api.client.util.Lists; +import com.google.protobuf.InvalidProtocolBufferException; +import feast.core.CoreServiceProto.ApplyFeatureSetResponse; +import feast.core.CoreServiceProto.ApplyFeatureSetResponse.Status; +import feast.core.CoreServiceProto.GetFeatureSetRequest; +import feast.core.CoreServiceProto.GetFeatureSetResponse; +import feast.core.CoreServiceProto.ListFeatureSetsRequest.Filter; +import feast.core.CoreServiceProto.ListFeatureSetsResponse; +import feast.core.CoreServiceProto.ListStoresRequest; +import 
feast.core.CoreServiceProto.ListStoresResponse; +import feast.core.CoreServiceProto.UpdateStoreRequest; +import feast.core.CoreServiceProto.UpdateStoreResponse; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.FeatureSetProto.FeatureSpec; +import feast.core.SourceProto.KafkaSourceConfig; +import feast.core.SourceProto.SourceType; +import feast.core.StoreProto; +import feast.core.StoreProto.Store.RedisConfig; +import feast.core.StoreProto.Store.StoreType; +import feast.core.StoreProto.Store.Subscription; +import feast.core.dao.FeatureSetRepository; +import feast.core.dao.StoreRepository; import feast.core.exception.RetrievalException; -import feast.core.model.EntityInfo; -import feast.core.model.FeatureGroupInfo; -import feast.core.model.FeatureInfo; -import feast.core.model.StorageInfo; -import feast.core.storage.SchemaManager; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureGroupSpecProto.FeatureGroupSpec; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.types.ValueProto.ValueType; +import feast.core.model.FeatureSet; +import feast.core.model.Field; +import feast.core.model.Source; +import feast.core.model.Store; +import feast.types.ValueProto.ValueType.Enum; +import io.grpc.StatusRuntimeException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Optional; +import java.util.stream.Collectors; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.mockito.ArgumentCaptor; +import org.mockito.ArgumentMatchers; import org.mockito.Mock; public class SpecServiceTest { - @Rule - public final ExpectedException exception = ExpectedException.none(); - @Mock - EntityInfoRepository entityInfoRepository; - @Mock - FeatureInfoRepository featureInfoRepository; - @Mock - FeatureGroupInfoRepository featureGroupInfoRepository; @Mock - SchemaManager schemaManager; + private FeatureSetRepository featureSetRepository; + @Mock - StorageSpecs storageSpecs; + private StoreRepository storeRepository; + + @Rule + public final ExpectedException expectedException = ExpectedException.none(); + + private SpecService specService; + private List featureSets; + private List stores; + private Source defaultSource; @Before public void setUp() { initMocks(this); + defaultSource = new Source(SourceType.KAFKA, + KafkaSourceConfig.newBuilder().setBootstrapServers("kafka:9092").setTopic("my-topic") + .build(), true); + + FeatureSet featureSet1v1 = newDummyFeatureSet("f1", 1); + FeatureSet featureSet1v2 = newDummyFeatureSet("f1", 2); + FeatureSet featureSet1v3 = newDummyFeatureSet("f1", 3); + FeatureSet featureSet2v1 = newDummyFeatureSet("f2", 1); + + featureSets = Arrays.asList(featureSet1v1, featureSet1v2, featureSet1v3, featureSet2v1); + when(featureSetRepository.findAll()) + .thenReturn(featureSets); + when(featureSetRepository.findByName("f1")) + .thenReturn(featureSets.subList(0, 3)); + when(featureSetRepository.findByNameWithWildcard("f1")) + .thenReturn(featureSets.subList(0, 3)); + when(featureSetRepository.findByName("asd")) + .thenReturn(Lists.newArrayList()); + when(featureSetRepository.findByNameWithWildcard("f%")) + .thenReturn(featureSets); + + Store store1 = newDummyStore("SERVING"); + Store store2 = newDummyStore("WAREHOUSE"); + stores = Arrays.asList(store1, store2); + when(storeRepository.findAll()).thenReturn(stores); + when(storeRepository.findById("SERVING")).thenReturn(Optional.of(store1)); + 
when(storeRepository.findById("NOTFOUND")).thenReturn(Optional.empty()); + + + + specService = new SpecService(featureSetRepository, storeRepository, defaultSource); } - private EntityInfo newTestEntityInfo(String name) { - EntityInfo entity = new EntityInfo(); - entity.setName(name); - entity.setDescription("testing"); - return entity; + @Test + public void shouldGetAllFeatureSetsIfNoFilterProvided() throws InvalidProtocolBufferException { + ListFeatureSetsResponse actual = specService + .listFeatureSets(Filter.newBuilder().setFeatureSetName("").build()); + List list = new ArrayList<>(); + for (FeatureSet featureSet : featureSets) { + FeatureSetSpec toProto = featureSet.toProto(); + list.add(toProto); + } + ListFeatureSetsResponse expected = ListFeatureSetsResponse + .newBuilder() + .addAllFeatureSets( + list) + .build(); + assertThat(actual, equalTo(expected)); } - private StorageInfo newTestStorageInfo(String id, String type) { - StorageInfo storage = new StorageInfo(); - storage.setId(id); - storage.setType(type); - return storage; + @Test + public void shouldGetAllFeatureSetsMatchingNameIfNoVersionProvided() + throws InvalidProtocolBufferException { + ListFeatureSetsResponse actual = specService + .listFeatureSets(Filter.newBuilder().setFeatureSetName("f1").build()); + List expectedFeatureSets = featureSets.stream() + .filter(fs -> fs.getName().equals("f1")) + .collect(Collectors.toList()); + List list = new ArrayList<>(); + for (FeatureSet expectedFeatureSet : expectedFeatureSets) { + FeatureSetSpec toProto = expectedFeatureSet.toProto(); + list.add(toProto); + } + ListFeatureSetsResponse expected = ListFeatureSetsResponse + .newBuilder() + .addAllFeatureSets( + list) + .build(); + assertThat(actual, equalTo(expected)); } - private FeatureInfo newTestFeatureInfo(String name) { - FeatureInfo feature = new FeatureInfo(); - feature.setId(Strings.lenientFormat("entity.%s", name)); - feature.setName(name); - feature.setEntity(newTestEntityInfo("entity")); - feature.setDescription(""); - feature.setOwner("@test"); - feature.setValueType(ValueType.Enum.BOOL); - feature.setUri(""); - return feature; + @Test + public void shouldGetAllFeatureSetsMatchingNameWithWildcardSearch() + throws InvalidProtocolBufferException { + ListFeatureSetsResponse actual = specService + .listFeatureSets(Filter.newBuilder().setFeatureSetName("f*").build()); + List expectedFeatureSets = featureSets.stream() + .filter(fs -> fs.getName().startsWith("f")) + .collect(Collectors.toList()); + List list = new ArrayList<>(); + for (FeatureSet expectedFeatureSet : expectedFeatureSets) { + FeatureSetSpec toProto = expectedFeatureSet.toProto(); + list.add(toProto); + } + ListFeatureSetsResponse expected = ListFeatureSetsResponse + .newBuilder() + .addAllFeatureSets( + list) + .build(); + assertThat(actual, equalTo(expected)); } @Test - public void shouldGetEntitiesMatchingIds() { - EntityInfo entity1 = newTestEntityInfo("entity1"); - EntityInfo entity2 = newTestEntityInfo("entity2"); - - ArrayList ids = Lists.newArrayList("entity1", "entity2"); - when(entityInfoRepository.findAllById(any(Iterable.class))) - .thenReturn(Lists.newArrayList(entity1, entity2)); - SpecService specService = - new SpecService( - entityInfoRepository, - featureInfoRepository, - featureGroupInfoRepository, - schemaManager, - storageSpecs); - List actual = specService.getEntities(ids); - List expected = Lists.newArrayList(entity1, entity2); + public void shouldGetAllFeatureSetsMatchingVersionIfNoComparator() + throws InvalidProtocolBufferException { + 
ListFeatureSetsResponse actual = specService + .listFeatureSets( + Filter.newBuilder().setFeatureSetName("f1").setFeatureSetVersion("1").build()); + List expectedFeatureSets = featureSets.stream() + .filter(fs -> fs.getName().equals("f1")) + .filter(fs -> fs.getVersion() == 1) + .collect(Collectors.toList()); + List list = new ArrayList<>(); + for (FeatureSet expectedFeatureSet : expectedFeatureSets) { + FeatureSetSpec toProto = expectedFeatureSet.toProto(); + list.add(toProto); + } + ListFeatureSetsResponse expected = ListFeatureSetsResponse + .newBuilder() + .addAllFeatureSets( + list) + .build(); assertThat(actual, equalTo(expected)); } @Test - public void shouldDeduplicateGetEntities() { - EntityInfo entity1 = newTestEntityInfo("entity1"); - EntityInfo entity2 = newTestEntityInfo("entity2"); - - ArrayList ids = Lists.newArrayList("entity1", "entity2", "entity2"); - when(entityInfoRepository.findAllById(any(Iterable.class))) - .thenReturn(Lists.newArrayList(entity1, entity2)); - SpecService specService = - new SpecService( - entityInfoRepository, - featureInfoRepository, - featureGroupInfoRepository, - schemaManager, - storageSpecs); - List actual = specService.getEntities(ids); - List expected = Lists.newArrayList(entity1, entity2); + public void shouldGetAllFeatureSetsGivenVersionWithComparator() + throws InvalidProtocolBufferException { + ListFeatureSetsResponse actual = specService + .listFeatureSets( + Filter.newBuilder().setFeatureSetName("f1").setFeatureSetVersion(">1").build()); + List expectedFeatureSets = featureSets.stream() + .filter(fs -> fs.getName().equals("f1")) + .filter(fs -> fs.getVersion() > 1) + .collect(Collectors.toList()); + List list = new ArrayList<>(); + for (FeatureSet expectedFeatureSet : expectedFeatureSets) { + FeatureSetSpec toProto = expectedFeatureSet.toProto(); + list.add(toProto); + } + ListFeatureSetsResponse expected = ListFeatureSetsResponse + .newBuilder() + .addAllFeatureSets( + list) + .build(); assertThat(actual, equalTo(expected)); } @Test - public void shouldThrowRetrievalExceptionIfAnyEntityNotFound() { - EntityInfo entity1 = newTestEntityInfo("entity1"); - - ArrayList ids = Lists.newArrayList("entity1", "entity2"); - when(entityInfoRepository.findAllById(ids)).thenReturn(Lists.newArrayList(entity1)); - SpecService specService = - new SpecService( - entityInfoRepository, - featureInfoRepository, - featureGroupInfoRepository, - schemaManager, - storageSpecs); - - exception.expect(RetrievalException.class); - exception.expectMessage("unable to retrieve all entities requested"); - specService.getEntities(ids); + public void shouldGetLatestFeatureSetGivenMissingVersionFilter() + throws InvalidProtocolBufferException { + GetFeatureSetResponse actual = specService + .getFeatureSet(GetFeatureSetRequest.newBuilder().setName("f1").build()); + FeatureSet expected = featureSets.get(2); + assertThat(actual.getFeatureSet(), equalTo(expected.toProto())); } @Test - public void shouldListAllEntitiesRegistered() { - EntityInfo entity1 = newTestEntityInfo("entity1"); - EntityInfo entity2 = newTestEntityInfo("entity2"); - - when(entityInfoRepository.findAll()).thenReturn(Lists.newArrayList(entity1, entity2)); - SpecService specService = - new SpecService( - entityInfoRepository, - featureInfoRepository, - featureGroupInfoRepository, - schemaManager, - storageSpecs); - - List actual = specService.listEntities(); - List expected = Lists.newArrayList(entity1, entity2); - assertThat(actual, equalTo(expected)); + public void 
shouldGetSpecificFeatureSetGivenSpecificVersionFilter() + throws InvalidProtocolBufferException { + GetFeatureSetResponse actual = specService + .getFeatureSet(GetFeatureSetRequest.newBuilder().setName("f1").setVersion(2).build()); + FeatureSet expected = featureSets.get(1); + assertThat(actual.getFeatureSet(), equalTo(expected.toProto())); } @Test - public void shouldGetFeaturesMatchingIds() { - FeatureInfo feature1 = newTestFeatureInfo("feature1"); - FeatureInfo feature2 = newTestFeatureInfo("feature2"); - - ArrayList ids = Lists.newArrayList("entity.feature1", "entity.feature2"); - when(featureInfoRepository.findAllById(any(Iterable.class))).thenReturn(Lists.newArrayList(feature1, feature2)); - SpecService specService = - new SpecService( - entityInfoRepository, - featureInfoRepository, - featureGroupInfoRepository, - schemaManager, - storageSpecs); - List actual = specService.getFeatures(ids); - List expected = Lists.newArrayList(feature1, feature2); - assertThat(actual, equalTo(expected)); + public void shouldThrowExceptionGivenMissingFeatureSetName() + throws InvalidProtocolBufferException { + expectedException.expect(StatusRuntimeException.class); + expectedException.expectMessage("INVALID_ARGUMENT: No feature set name provided"); + specService.getFeatureSet(GetFeatureSetRequest.newBuilder().setVersion(2).build()); } @Test - public void shouldDeduplicateGetFeature() { - FeatureInfo feature1 = newTestFeatureInfo("feature1"); - FeatureInfo feature2 = newTestFeatureInfo("feature2"); - - ArrayList ids = Lists.newArrayList("entity.feature1", "entity.feature2", "entity.feature2"); - when(featureInfoRepository.findAllById(any(Iterable.class))).thenReturn(Lists.newArrayList(feature1, feature2)); - SpecService specService = - new SpecService( - entityInfoRepository, - featureInfoRepository, - featureGroupInfoRepository, - schemaManager, - storageSpecs); - List actual = specService.getFeatures(ids); - List expected = Lists.newArrayList(feature1, feature2); - assertThat(actual, equalTo(expected)); + public void shouldThrowExceptionGivenMissingFeatureSet() + throws InvalidProtocolBufferException { + expectedException.expect(StatusRuntimeException.class); + expectedException.expectMessage("NOT_FOUND: Feature set could not be found"); + specService.getFeatureSet(GetFeatureSetRequest.newBuilder().setName("f1000").setVersion(2).build()); } @Test - public void shouldThrowRetrievalExceptionIfAnyFeatureNotFound() { - FeatureInfo feature2 = newTestFeatureInfo("feature2"); - - ArrayList ids = Lists.newArrayList("entity.feature1", "entity.feature2"); - when(featureInfoRepository.findAllById(ids)).thenReturn(Lists.newArrayList(feature2)); - SpecService specService = - new SpecService( - entityInfoRepository, - featureInfoRepository, - featureGroupInfoRepository, - schemaManager, - storageSpecs); - exception.expect(RetrievalException.class); - exception.expectMessage("unable to retrieve all features requested: " + ids); - specService.getFeatures(ids); + public void shouldThrowRetrievalExceptionGivenInvalidFeatureSetVersionComparator() + throws InvalidProtocolBufferException { + expectedException.expect(StatusRuntimeException.class); + expectedException.expectMessage("Invalid comparator '=<' provided."); + specService.listFeatureSets( + Filter.newBuilder().setFeatureSetName("f1").setFeatureSetVersion("=<1").build()); } @Test - public void shouldListAllFeaturesRegistered() { - FeatureInfo feature1 = newTestFeatureInfo("feature1"); - FeatureInfo feature2 = newTestFeatureInfo("feature2"); - - 
when(featureInfoRepository.findAll()).thenReturn(Lists.newArrayList(feature1, feature2)); - SpecService specService = - new SpecService( - entityInfoRepository, - featureInfoRepository, - featureGroupInfoRepository, - schemaManager, - storageSpecs); - List actual = specService.listFeatures(); - List expected = Lists.newArrayList(feature1, feature2); - assertThat(actual, equalTo(expected)); + public void shouldReturnAllStoresIfNoNameProvided() throws InvalidProtocolBufferException { + ListStoresResponse actual = specService + .listStores(ListStoresRequest.Filter.newBuilder().build()); + ListStoresResponse.Builder expected = ListStoresResponse.newBuilder(); + for (Store expectedStore : stores) { + expected.addStore(expectedStore.toProto()); + } + assertThat(actual, equalTo(expected.build())); } @Test - public void shouldGetStorageMatchingIds() { - StorageInfo redisStorage = newTestStorageInfo("REDIS1", "REDIS"); - StorageInfo bqStorage = newTestStorageInfo("BIGQUERY1", "BIGQUERY"); - when(storageSpecs.getServingStorageSpec()).thenReturn(redisStorage.getStorageSpec()); - when(storageSpecs.getWarehouseStorageSpec()).thenReturn(bqStorage.getStorageSpec()); - - ArrayList ids = Lists.newArrayList("REDIS1", "BIGQUERY1"); - SpecService specService = - new SpecService( - entityInfoRepository, - featureInfoRepository, - featureGroupInfoRepository, - schemaManager, - storageSpecs); - List actual = specService.getStorage(ids); - List expected = Lists.newArrayList(redisStorage, bqStorage); - assertThat(actual, equalTo(expected)); + public void shouldReturnStoreWithName() throws InvalidProtocolBufferException { + ListStoresResponse actual = specService + .listStores(ListStoresRequest.Filter.newBuilder().setName("SERVING").build()); + List expectedStores = stores.stream().filter(s -> s.getName().equals("SERVING")) + .collect(Collectors.toList()); + ListStoresResponse.Builder expected = ListStoresResponse.newBuilder(); + for (Store expectedStore : expectedStores) { + expected.addStore(expectedStore.toProto()); + } + assertThat(actual, equalTo(expected.build())); } @Test - public void shouldDeduplicateGetStorage() { - StorageInfo redisStorage = newTestStorageInfo("REDIS1", "REDIS"); - StorageInfo bqStorage = newTestStorageInfo("BIGQUERY1", "BIGQUERY"); - when(storageSpecs.getServingStorageSpec()).thenReturn(redisStorage.getStorageSpec()); - when(storageSpecs.getWarehouseStorageSpec()).thenReturn(bqStorage.getStorageSpec()); - ArrayList ids = Lists.newArrayList("REDIS1", "BIGQUERY1", "BIGQUERY1"); - - SpecService specService = - new SpecService( - entityInfoRepository, - featureInfoRepository, - featureGroupInfoRepository, - schemaManager, - storageSpecs); - List actual = specService.getStorage(ids); - List expected = Lists.newArrayList(redisStorage, bqStorage); - assertThat(actual, equalTo(expected)); + public void shouldThrowRetrievalExceptionIfNoStoresFoundWithName() { + expectedException.expect(RetrievalException.class); + expectedException.expectMessage("Store with name 'NOTFOUND' not found"); + specService + .listStores(ListStoresRequest.Filter.newBuilder().setName("NOTFOUND").build()); } @Test - public void shouldThrowRetrievalExceptionIfAnyStorageNotFound() { - StorageInfo redisStorage = newTestStorageInfo("REDIS1", "REDIS"); - when(storageSpecs.getServingStorageSpec()).thenReturn(redisStorage.getStorageSpec()); - - ArrayList ids = Lists.newArrayList("REDIS1", "BIGQUERY1"); - SpecService specService = - new SpecService( - entityInfoRepository, - featureInfoRepository, - featureGroupInfoRepository, - 
schemaManager, - storageSpecs); - - exception.expect(RetrievalException.class); - exception.expectMessage("unable to retrieve all storage requested: " + ids); - specService.getStorage(ids); + public void applyFeatureSetShouldReturnFeatureSetWithLatestVersionIfFeatureSetHasNotChanged() + throws InvalidProtocolBufferException { + FeatureSetSpec incomingFeatureSet = featureSets.get(2) + .toProto() + .toBuilder() + .clearVersion() + .build(); + ApplyFeatureSetResponse applyFeatureSetResponse = specService + .applyFeatureSet(incomingFeatureSet); + + verify(featureSetRepository, times(0)).save(ArgumentMatchers.any(FeatureSet.class)); + assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.NO_CHANGE)); + assertThat(applyFeatureSetResponse.getFeatureSet(), equalTo(featureSets.get(2).toProto())); } @Test - public void shouldListAllStorageRegistered() { - StorageInfo redisStorage = newTestStorageInfo("REDIS1", "REDIS"); - StorageInfo bqStorage = newTestStorageInfo("BIGQUERY1", "BIGQUERY"); - when(storageSpecs.getServingStorageSpec()).thenReturn(redisStorage.getStorageSpec()); - when(storageSpecs.getWarehouseStorageSpec()).thenReturn(bqStorage.getStorageSpec()); - - SpecService specService = - new SpecService( - entityInfoRepository, - featureInfoRepository, - featureGroupInfoRepository, - schemaManager, - storageSpecs); - List actual = specService.listStorage(); - List expected = Lists.newArrayList(redisStorage, bqStorage); - assertThat(actual, equalTo(expected)); + public void applyFeatureSetShouldApplyFeatureSetWithInitVersionIfNotExists() + throws InvalidProtocolBufferException { + when(featureSetRepository.findByName("f2")).thenReturn(Lists.newArrayList()); + FeatureSetSpec incomingFeatureSet = newDummyFeatureSet("f2", 1) + .toProto() + .toBuilder() + .clearVersion() + .build(); + ApplyFeatureSetResponse applyFeatureSetResponse = specService + .applyFeatureSet(incomingFeatureSet); + verify(featureSetRepository).saveAndFlush(ArgumentMatchers.any(FeatureSet.class)); + FeatureSetSpec expected = incomingFeatureSet.toBuilder() + .setVersion(1) + .setSource(defaultSource.toProto()) + .build(); + assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.CREATED)); + assertThat(applyFeatureSetResponse.getFeatureSet(), equalTo(expected)); } @Test - public void shouldRegisterFeatureWithGroupInheritance() { - FeatureGroupInfo group = new FeatureGroupInfo(); - group.setId("testGroup"); - when(featureGroupInfoRepository.findById("testGroup")).thenReturn(Optional.of(group)); - - EntityInfo entity = new EntityInfo(); - entity.setName("entity"); - when(entityInfoRepository.findById("entity")).thenReturn(Optional.of(entity)); - - FeatureSpec spec = - FeatureSpec.newBuilder() - .setId("entity.name") - .setName("name") - .setOwner("owner") - .setDescription("desc") - .setEntity("entity") - .setUri("uri") - .setGroup("testGroup") - .setValueType(ValueType.Enum.BYTES) - .build(); - - FeatureSpec resolvedSpec = - FeatureSpec.newBuilder() - .setId("entity.name") - .setName("name") - .setOwner("owner") - .setDescription("desc") - .setEntity("entity") - .setUri("uri") - .setGroup("testGroup") - .setValueType(ValueType.Enum.BYTES) - .build(); - - ArgumentCaptor resolvedSpecCaptor = ArgumentCaptor.forClass(FeatureSpec.class); - - FeatureInfo featureInfo = new FeatureInfo(spec, entity, group); - when(featureInfoRepository.saveAndFlush(featureInfo)).thenReturn(featureInfo); - - SpecService specService = - new SpecService( - entityInfoRepository, - featureInfoRepository, - featureGroupInfoRepository, - 
schemaManager, - storageSpecs); - FeatureInfo actual = specService.applyFeature(spec); - verify(schemaManager).registerFeature(resolvedSpecCaptor.capture()); - - assertThat(resolvedSpecCaptor.getValue(), equalTo(resolvedSpec)); - assertThat(actual, equalTo(featureInfo)); + public void applyFeatureSetShouldIncrementFeatureSetVersionIfAlreadyExists() + throws InvalidProtocolBufferException { + FeatureSetSpec incomingFeatureSet = featureSets.get(2).toProto().toBuilder() + .clearVersion() + .addFeatures(FeatureSpec.newBuilder().setName("feature2").setValueType(Enum.STRING)) + .build(); + FeatureSetSpec expected = incomingFeatureSet.toBuilder() + .setVersion(4) + .setSource(defaultSource.toProto()) + .build(); + ApplyFeatureSetResponse applyFeatureSetResponse = specService + .applyFeatureSet(incomingFeatureSet); + verify(featureSetRepository).saveAndFlush(ArgumentMatchers.any(FeatureSet.class)); + assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.CREATED)); + assertThat(applyFeatureSetResponse.getFeatureSet(), equalTo(expected)); } @Test - public void shouldRegisterFeatureGroup() { - FeatureGroupSpec spec = - FeatureGroupSpec.newBuilder() - .setId("group") - .addTags("tag") - .build(); - FeatureGroupInfo expectedFeatureGroupInfo = new FeatureGroupInfo(spec); - - when(featureGroupInfoRepository.saveAndFlush(expectedFeatureGroupInfo)) - .thenReturn(expectedFeatureGroupInfo); - SpecService specService = - new SpecService( - entityInfoRepository, - featureInfoRepository, - featureGroupInfoRepository, - schemaManager, - storageSpecs); - FeatureGroupInfo actual = specService.applyFeatureGroup(spec); - assertThat(actual, equalTo(expectedFeatureGroupInfo)); + public void shouldUpdateStoreIfConfigChanges() throws InvalidProtocolBufferException { + when(storeRepository.findById("SERVING")).thenReturn(Optional.of(stores.get(0))); + StoreProto.Store newStore = StoreProto.Store.newBuilder() + .setName("SERVING") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder()) + .addSubscriptions(Subscription.newBuilder().setName("a").setVersion(">1")) + .build(); + UpdateStoreResponse actual = specService + .updateStore(UpdateStoreRequest.newBuilder().setStore(newStore).build()); + UpdateStoreResponse expected = UpdateStoreResponse.newBuilder() + .setStore(newStore) + .setStatus(UpdateStoreResponse.Status.UPDATED) + .build(); + ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(Store.class); + verify(storeRepository, times(1)).save(argumentCaptor.capture()); + assertThat(argumentCaptor.getValue().toProto(), equalTo(newStore)); + assertThat(actual, equalTo(expected)); } @Test - public void shouldRegisterEntity() { - EntitySpec spec = - EntitySpec.newBuilder() - .setName("entity") - .setDescription("description") - .addTags("tag") - .build(); - EntityInfo entityInfo = new EntityInfo(spec); - when(entityInfoRepository.saveAndFlush(entityInfo)).thenReturn(entityInfo); - SpecService specService = - new SpecService( - entityInfoRepository, - featureInfoRepository, - featureGroupInfoRepository, - schemaManager, - storageSpecs); - EntityInfo actual = specService.applyEntity(spec); - assertThat(actual, equalTo(entityInfo)); + public void shouldDoNothingIfNoChange() throws InvalidProtocolBufferException { + when(storeRepository.findById("SERVING")).thenReturn(Optional.of(stores.get(0))); + UpdateStoreResponse actual = specService + .updateStore(UpdateStoreRequest.newBuilder().setStore(stores.get(0).toProto()).build()); + UpdateStoreResponse expected = UpdateStoreResponse.newBuilder() + 
.setStore(stores.get(0).toProto()) + .setStatus(UpdateStoreResponse.Status.NO_CHANGE) + .build(); + verify(storeRepository, times(0)).save(ArgumentMatchers.any()); + assertThat(actual, equalTo(expected)); + } + + private FeatureSet newDummyFeatureSet(String name, int version) { + Field feature = new Field(name, "feature", Enum.INT64); + Field entity = new Field(name, "entity", Enum.STRING); + return new FeatureSet(name, version, 100L, Arrays.asList(entity), Arrays.asList(feature), + defaultSource); + } + + private Store newDummyStore(String name) { + // Add type to this method when we enable filtering by type + Store store = new Store(); + store.setName(name); + store.setType(StoreType.REDIS.toString()); + store.setSubscriptions(""); + store.setConfig(RedisConfig.newBuilder().setPort(6379).build().toByteArray()); + return store; } } + diff --git a/core/src/test/java/feast/core/storage/BigQueryStorageManagerTest.java b/core/src/test/java/feast/core/storage/BigQueryStorageManagerTest.java deleted file mode 100644 index 4e4408307b1..00000000000 --- a/core/src/test/java/feast/core/storage/BigQueryStorageManagerTest.java +++ /dev/null @@ -1,269 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.storage; - -import static org.hamcrest.CoreMatchers.equalTo; -import static org.junit.Assert.assertThat; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.google.cloud.bigquery.BigQuery; -import com.google.cloud.bigquery.Dataset; -import com.google.cloud.bigquery.DatasetInfo; -import com.google.cloud.bigquery.Field; -import com.google.cloud.bigquery.FieldList; -import com.google.cloud.bigquery.LegacySQLTypeName; -import com.google.cloud.bigquery.Schema; -import com.google.cloud.bigquery.StandardTableDefinition; -import com.google.cloud.bigquery.Table; -import com.google.cloud.bigquery.TableDefinition; -import com.google.cloud.bigquery.TableId; -import com.google.cloud.bigquery.TableInfo; -import com.google.cloud.bigquery.ViewDefinition; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.types.ValueProto.ValueType; -import java.util.ArrayList; -import java.util.List; -import lombok.AllArgsConstructor; -import lombok.Getter; -import org.junit.Before; -import org.junit.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; - -public class BigQueryStorageManagerTest { - - private BigQueryStorageManager bqManager; - @Mock - private BigQuery bigQuery; - private String datasetName; - private String projectId; - private BigQueryViewTemplater templater; - - @Before - public void setUp() throws Exception { - datasetName = "dummyDataset"; - - MockitoAnnotations.initMocks(this); - BigQueryViewTemplater templater = - new 
BigQueryViewTemplater("{{tableName}}{{#features}}.{{name}}{{/features}}"); - bqManager = new BigQueryStorageManager("BIGQUERY1", bigQuery, projectId, datasetName, templater); - } - - @Test - public void shouldCreateNewDatasetAndTableAndViewIfNotExist() throws InterruptedException { - when(bigQuery.getDataset(any(String.class))).thenReturn(null); - String featureName = "my_feature"; - String entityName = "my_entity"; - String featureId = createFeatureId(entityName, featureName); - String description = "my feature is awesome"; - ValueType.Enum type = ValueType.Enum.INT64; - - FeatureSpec fs = - FeatureSpec.newBuilder() - .setEntity(entityName) - .setId(featureId) - .setName(featureName) - .setDescription(description) - .setValueType(type) - .build(); - - ArgumentCaptor datasetArg = ArgumentCaptor.forClass(DatasetInfo.class); - ArgumentCaptor tableInfoArg = ArgumentCaptor.forClass(TableInfo.class); - - Table expectedTable = mock(Table.class); - TableDefinition definition = StandardTableDefinition - .of(Schema.of(Field.of(featureName, LegacySQLTypeName.INTEGER))); - when(expectedTable.getDefinition()).thenReturn(definition); - when(bigQuery.create(any(TableInfo.class))).thenReturn(expectedTable); - - bqManager.registerNewFeature(fs); - - verify(bigQuery).create(datasetArg.capture()); - assertThat(datasetArg.getValue().getDatasetId().getDataset(), equalTo(datasetName)); - - verify(bigQuery, times(2)).create(tableInfoArg.capture()); - List capturedValues = tableInfoArg.getAllValues(); - - // verify table is created - TableInfo actualTable = capturedValues.get(0); - assertThat( - actualTable.getTableId().getTable(), - equalTo(String.format("%s", entityName))); - List fields = actualTable.getDefinition().getSchema().getFields(); - assertThat(fields.size(), equalTo(5)); - Field idField = fields.get(0); - assertThat(idField.getName(), equalTo("id")); - Field etsField = fields.get(1); - assertThat(etsField.getName(), equalTo("event_timestamp")); - Field ctsField = fields.get(2); - assertThat(ctsField.getName(), equalTo("created_timestamp")); - Field field = fields.get(4); - assertThat(field.getDescription(), equalTo(description)); - Field jobIdField = fields.get(3); - assertThat(jobIdField.getName(), equalTo("job_id")); - assertThat(field.getType(), equalTo(LegacySQLTypeName.INTEGER)); - assertThat(field.getName(), equalTo(featureName)); - - // verify view is created - TableInfo actualView = capturedValues.get(1); - assertThat( - actualView.getTableId().getTable(), - equalTo(String.format("%s_view", entityName))); - ViewDefinition actualDefinition = actualView.getDefinition(); - assertThat( - actualDefinition.getQuery(), - equalTo( - String.format( - "%s.%s", entityName, featureName))); - } - - @Test - public void shouldNotUpdateTableIfColumnExists() { - String featureName = "my_feature"; - String entityName = "my_entity"; - String featureId = createFeatureId(entityName, featureName); - String description = "my feature is awesome"; - ValueType.Enum type = ValueType.Enum.BOOL; - LegacySQLTypeName sqlType = LegacySQLTypeName.BOOLEAN; - - Table table = mock(Table.class); - TableDefinition tableDefinition = mock(TableDefinition.class); - when(table.getDefinition()).thenReturn(tableDefinition); - - Schema schema = mock(Schema.class); - when(tableDefinition.getSchema()).thenReturn(schema); - Field field = Field.of(featureName, sqlType); - FieldList fields = FieldList.of(field); - when(schema.getFields()).thenReturn(fields); - - when(bigQuery.getDataset(any(String.class))).thenReturn(mock(Dataset.class)); - 
when(bigQuery.getTable(any(TableId.class))).thenReturn(table); - FeatureSpec fs = - FeatureSpec.newBuilder() - .setEntity(entityName) - .setId(featureId) - .setName(featureName) - .setDescription(description) - .setValueType(type) - .build(); - - bqManager.registerNewFeature(fs); - - verify(bigQuery, never()).update(any(TableInfo.class)); - verify(bigQuery, never()).create(any(TableInfo.class)); - } - - @Test - public void shouldUpdateTableAndViewIfColumnNotExists() { - String newFeatureName = "my_feature"; - String entityName = "my_entity"; - String featureId = createFeatureId(entityName, newFeatureName); - String description = "my feature is awesome"; - ValueType.Enum type = ValueType.Enum.BOOL; - LegacySQLTypeName sqlType = LegacySQLTypeName.BOOLEAN; - String existingFeatureName = "old_feature"; - - FeatureSchema oldFeatureSchema = new FeatureSchema(existingFeatureName, sqlType, description); - FeatureSchema newFeatureSchema = new FeatureSchema(newFeatureName, sqlType, description); - Table oldTable = createTable(oldFeatureSchema); - Table newTable = createTable(oldFeatureSchema, newFeatureSchema); - - when(bigQuery.getDataset(any(String.class))).thenReturn(mock(Dataset.class)); - when(bigQuery.getTable(any(TableId.class))).thenReturn(oldTable); - FeatureSpec fs = - FeatureSpec.newBuilder() - .setEntity(entityName) - .setId(featureId) - .setName(newFeatureName) - .setDescription(description) - .setValueType(type) - .build(); - - when(bigQuery.update(any(TableInfo.class))).thenReturn(newTable); - - bqManager.registerNewFeature(fs); - - ArgumentCaptor updateTableArg = ArgumentCaptor.forClass(TableInfo.class); - verify(bigQuery, times(2)).update(updateTableArg.capture()); - - List capturedArgs = updateTableArg.getAllValues(); - - // check table - TableInfo actualTable = capturedArgs.get(0); - FieldList actualFieldList = actualTable.getDefinition().getSchema().getFields(); - assertThat(actualFieldList.size(), equalTo(2)); - assertThat(actualFieldList.get(0), - equalTo(oldTable.getDefinition().getSchema().getFields().get(0))); - Field newField = Field.newBuilder(newFeatureName, sqlType).setDescription(description).build(); - assertThat(actualFieldList.get(1), equalTo(newField)); - - // check view - TableInfo actualView = capturedArgs.get(1); - assertThat( - actualView.getTableId().getTable(), - equalTo(String.format("%s_view", entityName))); - ViewDefinition actualDefinition = actualView.getDefinition(); - assertThat( - actualDefinition.getQuery(), - equalTo( - String.format( - "%s.%s.%s", - entityName, - existingFeatureName, - newFeatureName))); - } - - private String createFeatureId( - String entityName, String featureName) { - return String.format("%s.%s", entityName, featureName).toLowerCase(); - } - - private Table createTable(FeatureSchema... 
featureSchemas) { - Table table = mock(Table.class); - TableDefinition tableDefinition = mock(TableDefinition.class); - when(table.getDefinition()).thenReturn(tableDefinition); - - Schema schema = mock(Schema.class); - when(tableDefinition.getSchema()).thenReturn(schema); - - List fieldList = new ArrayList<>(); - for (FeatureSchema featureSchema : featureSchemas) { - fieldList.add( - Field.newBuilder(featureSchema.getFeatureName(), featureSchema.getType()) - .setDescription(featureSchema.getDescription()) - .build()); - } - FieldList fields = FieldList.of(fieldList); - when(schema.getFields()).thenReturn(fields); - return table; - } - - @AllArgsConstructor - @Getter - private static class FeatureSchema { - String featureName; - LegacySQLTypeName type; - String description; - } -} diff --git a/core/src/test/java/feast/core/storage/BigQueryViewTemplaterTest.java b/core/src/test/java/feast/core/storage/BigQueryViewTemplaterTest.java deleted file mode 100644 index 38d1fc55dd9..00000000000 --- a/core/src/test/java/feast/core/storage/BigQueryViewTemplaterTest.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.storage; - -import org.junit.Test; - -import java.util.Arrays; - -import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; - -public class BigQueryViewTemplaterTest { - @Test - public void shouldExecuteTemplateGivenTemplateValues() { - String testTemplate = - "{{project}}.{{dataset}}.{{tableName}}{{#features}}.{{name}}{{/features}}"; - BigQueryViewTemplater templater = new BigQueryViewTemplater(testTemplate); - String out = templater.getViewQuery("p", "ds", "tn", Arrays.asList("f1", "f2")); - assertThat(out, equalTo("p.ds.tn.f1.f2")); - } -} diff --git a/core/src/test/java/feast/core/training/BigQueryDatasetTemplaterTest.java b/core/src/test/java/feast/core/training/BigQueryDatasetTemplaterTest.java deleted file mode 100644 index 90d73834f15..00000000000 --- a/core/src/test/java/feast/core/training/BigQueryDatasetTemplaterTest.java +++ /dev/null @@ -1,368 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ -package feast.core.training; - -import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.google.common.base.Charsets; -import com.google.common.io.CharStreams; -import com.google.protobuf.Timestamp; -import com.google.protobuf.util.Timestamps; -import com.hubspot.jinjava.Jinjava; -import feast.core.DatasetServiceProto.FeatureSet; -import feast.core.dao.FeatureInfoRepository; -import feast.core.model.EntityInfo; -import feast.core.model.FeatureInfo; -import feast.core.storage.BigQueryStorageManager; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.types.ValueProto.ValueType; -import feast.types.ValueProto.ValueType.Enum; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.time.Instant; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; -import org.hibernate.engine.jdbc.internal.BasicFormatterImpl; -import org.junit.Before; -import org.junit.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.springframework.core.io.ClassPathResource; -import org.springframework.core.io.Resource; - -public class BigQueryDatasetTemplaterTest { - - private BigQueryDatasetTemplater templater; - private BasicFormatterImpl formatter = new BasicFormatterImpl(); - - @Mock private FeatureInfoRepository featureInfoRespository; - private String sqlTemplate; - - @Before - public void setUp() throws Exception { - MockitoAnnotations.initMocks(this); - StorageSpec storageSpec = - StorageSpec.newBuilder() - .setId("BIGQUERY1") - .setType(BigQueryStorageManager.TYPE) - .putOptions("project", "project") - .putOptions("dataset", "dataset") - .build(); - - Jinjava jinjava = new Jinjava(); - Resource resource = new ClassPathResource("templates/bq_training.tmpl"); - InputStream resourceInputStream = resource.getInputStream(); - sqlTemplate = CharStreams.toString(new InputStreamReader(resourceInputStream, Charsets.UTF_8)); - templater = - new BigQueryDatasetTemplater(jinjava, sqlTemplate, storageSpec, featureInfoRespository); - } - - @Test(expected = NoSuchElementException.class) - public void shouldThrowNoSuchElementExceptionIfFeatureNotFound() { - FeatureSet fs = - FeatureSet.newBuilder() - .setEntityName("myentity") - .addAllFeatureIds(Arrays.asList("myentity.feature1", "myentity.feature2")) - .build(); - templater.createQuery( - fs, Timestamps.fromSeconds(0), Timestamps.fromSeconds(1), 0, Collections.emptyMap()); - } - - @Test - public void shouldPassCorrectArgumentToTemplateEngine() { - StorageSpec storageSpec = - StorageSpec.newBuilder() - .setId("BIGQUERY1") - .setType(BigQueryStorageManager.TYPE) - .putOptions("project", "project") - .putOptions("dataset", "dataset") - .build(); - - Jinjava jinjava = mock(Jinjava.class); - templater = - new BigQueryDatasetTemplater(jinjava, sqlTemplate, storageSpec, featureInfoRespository); - - Timestamp startDate = - Timestamps.fromSeconds(Instant.parse("2018-01-01T00:00:00.00Z").getEpochSecond()); - Timestamp endDate = - Timestamps.fromSeconds(Instant.parse("2019-01-01T00:00:00.00Z").getEpochSecond()); - int 
limit = 100; - String featureId = "myentity.feature1"; - String featureName = "feature1"; - - when(featureInfoRespository.findAllById(any(List.class))) - .thenReturn(Collections.singletonList(createFeatureInfo(featureId, featureName, Enum.INT64))); - - FeatureSet fs = - FeatureSet.newBuilder() - .setEntityName("myentity") - .addAllFeatureIds(Arrays.asList(featureId)) - .build(); - - templater.createQuery(fs, startDate, endDate, limit, Collections.emptyMap()); - - ArgumentCaptor templateArg = ArgumentCaptor.forClass(String.class); - ArgumentCaptor> contextArg = ArgumentCaptor.forClass(Map.class); - verify(jinjava).render(templateArg.capture(), contextArg.capture()); - - String actualTemplate = templateArg.getValue(); - assertThat(actualTemplate, equalTo(sqlTemplate)); - - Map actualContext = contextArg.getValue(); - assertThat(actualContext.get("start_date"), equalTo("2018-01-01")); - assertThat(actualContext.get("end_date"), equalTo("2019-01-01")); - assertThat(actualContext.get("limit"), equalTo(String.valueOf(limit))); - - List features = (List) actualContext.get("features"); - assertThat(features.get(0), equalTo(featureName)); - } - - @Test - public void shouldRenderCorrectQuery1() throws Exception { - String featureId1 = "myentity.feature1"; - String featureName1 = "feature1"; - String featureId2 = "myentity.feature2"; - String featureName2 = "feature2"; - - FeatureInfo featureInfo1 = createFeatureInfo(featureId1, featureName1, Enum.INT64); - FeatureInfo featureInfo2 = createFeatureInfo(featureId2, featureName2, Enum.INT64); - - String featureId3 = "myentity.feature3"; - String featureName3 = "feature3"; - FeatureInfo featureInfo3 = createFeatureInfo(featureId3, featureName3, Enum.INT64); - - when(featureInfoRespository.findAllById(any(List.class))) - .thenReturn(Arrays.asList(featureInfo1, featureInfo2, featureInfo3)); - - FeatureSet fs = - FeatureSet.newBuilder() - .setEntityName("myentity") - .addAllFeatureIds(Arrays.asList(featureId1, featureId2, featureId3)) - .build(); - Timestamp startDate = - Timestamps.fromSeconds(Instant.parse("2018-01-02T00:00:00.00Z").getEpochSecond()); - Timestamp endDate = - Timestamps.fromSeconds(Instant.parse("2018-01-30T12:11:11.00Z").getEpochSecond()); - int limit = 100; - - String query = templater.createQuery(fs, startDate, endDate, limit, Collections.emptyMap()); - - checkExpectedQuery(query, "expQuery1.sql"); - } - - @Test - public void shouldRenderCorrectQuery2() throws Exception { - List featureInfos = new ArrayList<>(); - List featureIds = new ArrayList<>(); - - String featureId = "myentity.feature1"; - String featureName = "feature1"; - - featureInfos.add(createFeatureInfo(featureId, featureName, Enum.INT64)); - featureIds.add(featureId); - - when(featureInfoRespository.findAllById(any(List.class))).thenReturn(featureInfos); - - Timestamp startDate = - Timestamps.fromSeconds(Instant.parse("2018-01-02T00:00:00.00Z").getEpochSecond()); - Timestamp endDate = - Timestamps.fromSeconds(Instant.parse("2018-01-30T12:11:11.00Z").getEpochSecond()); - FeatureSet featureSet = - FeatureSet.newBuilder().setEntityName("myentity").addAllFeatureIds(featureIds).build(); - - String query = - templater.createQuery(featureSet, startDate, endDate, 1000, Collections.emptyMap()); - - checkExpectedQuery(query, "expQuery2.sql"); - } - - @Test - public void shouldRenderCorrectQueryWithNumberFilter() throws Exception { - List featureInfos = new ArrayList<>(); - List featureIds = new ArrayList<>(); - - String featureId = "myentity.feature1"; - String featureId2 = 
"myentity.feature2"; - String featureName = "feature1"; - String featureName2 = "feature2"; - - featureInfos.add(createFeatureInfo(featureId, featureName, Enum.INT64)); - featureInfos.add(createFeatureInfo(featureId2, featureName2, Enum.INT64)); - featureIds.add(featureId); - featureIds.add(featureId2); - - when(featureInfoRespository.findAllById(any(List.class))).thenReturn(featureInfos); - - Timestamp startDate = - Timestamps.fromSeconds(Instant.parse("2018-01-02T00:00:00.00Z").getEpochSecond()); - Timestamp endDate = - Timestamps.fromSeconds(Instant.parse("2018-01-30T12:11:11.00Z").getEpochSecond()); - FeatureSet featureSet = - FeatureSet.newBuilder().setEntityName("myentity").addAllFeatureIds(featureIds).build(); - - Map filter = new HashMap<>(); - filter.put("myentity.feature1", "10"); - - String query = - templater.createQuery(featureSet, startDate, endDate, 1000, filter); - - checkExpectedQuery(query, "expQueryWithNumberFilter.sql"); - } - - @Test - public void shouldRenderCorrectQueryWithStringFilter() throws Exception { - List featureInfos = new ArrayList<>(); - List featureIds = new ArrayList<>(); - - String featureId = "myentity.feature1"; - String featureId2 = "myentity.feature2"; - String featureName = "feature1"; - String featureName2 = "feature2"; - - featureInfos.add(createFeatureInfo(featureId, featureName, Enum.STRING)); - featureInfos.add(createFeatureInfo(featureId2, featureName2, Enum.STRING)); - featureIds.add(featureId); - featureIds.add(featureId2); - - when(featureInfoRespository.findAllById(any(List.class))).thenReturn(featureInfos); - - Timestamp startDate = - Timestamps.fromSeconds(Instant.parse("2018-01-02T00:00:00.00Z").getEpochSecond()); - Timestamp endDate = - Timestamps.fromSeconds(Instant.parse("2018-01-30T12:11:11.00Z").getEpochSecond()); - FeatureSet featureSet = - FeatureSet.newBuilder().setEntityName("myentity").addAllFeatureIds(featureIds).build(); - - Map filter = new HashMap<>(); - filter.put("myentity.feature1", "10"); - - String query = - templater.createQuery(featureSet, startDate, endDate, 1000, filter); - - checkExpectedQuery(query, "expQueryWithStringFilter.sql"); - } - - - @Test - public void shouldRenderCorrectQueryWithStringAndNumberFilter() throws Exception { - List featureInfos = new ArrayList<>(); - List featureIds = new ArrayList<>(); - - String featureId = "myentity.feature1"; - String featureId2 = "myentity.feature2"; - String featureName = "feature1"; - String featureName2 = "feature2"; - - featureInfos.add(createFeatureInfo(featureId, featureName, Enum.INT64)); - featureInfos.add(createFeatureInfo(featureId2, featureName2, Enum.STRING)); - featureIds.add(featureId); - featureIds.add(featureId2); - - when(featureInfoRespository.findAllById(any(List.class))).thenReturn(featureInfos); - - Timestamp startDate = - Timestamps.fromSeconds(Instant.parse("2018-01-02T00:00:00.00Z").getEpochSecond()); - Timestamp endDate = - Timestamps.fromSeconds(Instant.parse("2018-01-30T12:11:11.00Z").getEpochSecond()); - FeatureSet featureSet = - FeatureSet.newBuilder().setEntityName("myentity").addAllFeatureIds(featureIds).build(); - - Map filter = new HashMap<>(); - filter.put("myentity.feature1", "10"); - filter.put("myentity.feature2", "HELLO"); - - String query = - templater.createQuery(featureSet, startDate, endDate, 1000, filter); - - checkExpectedQuery(query, "expQueryWithNumberAndStringFilter.sql"); - } - - - @Test - public void shouldRenderCorrectQueryWithJobIdFilter() throws Exception { - List featureInfos = new ArrayList<>(); - List featureIds 
= new ArrayList<>(); - - String featureId = "myentity.feature1"; - String featureId2 = "myentity.feature2"; - String featureName = "feature1"; - String featureName2 = "feature2"; - - featureInfos.add(createFeatureInfo(featureId, featureName, Enum.INT64)); - featureInfos.add(createFeatureInfo(featureId2, featureName2, Enum.STRING)); - featureIds.add(featureId); - featureIds.add(featureId2); - - when(featureInfoRespository.findAllById(any(List.class))).thenReturn(featureInfos); - - Timestamp startDate = - Timestamps.fromSeconds(Instant.parse("2018-01-02T00:00:00.00Z").getEpochSecond()); - Timestamp endDate = - Timestamps.fromSeconds(Instant.parse("2018-01-30T12:11:11.00Z").getEpochSecond()); - FeatureSet featureSet = - FeatureSet.newBuilder().setEntityName("myentity").addAllFeatureIds(featureIds).build(); - - Map filter = new HashMap<>(); - filter.put("myentity.feature1", "10"); - filter.put("myentity.feature2", "HELLO"); - filter.put("job_id", "1234567890"); - - String query = - templater.createQuery(featureSet, startDate, endDate, 1000, filter); - - checkExpectedQuery(query, "expQueryWithJobIdFilter.sql"); - } - - private void checkExpectedQuery(String query, String pathToExpQuery) throws Exception { - String tmpl = - CharStreams.toString( - new InputStreamReader( - getClass().getClassLoader().getResourceAsStream("sql/" + pathToExpQuery), - Charsets.UTF_8)); - - String expQuery = formatter.format(tmpl.replaceAll("\\s+", " ").trim()); - query = formatter.format(query.replaceAll("\\s+", " ").trim()); - - assertThat(query, equalTo(expQuery)); - } - - private FeatureInfo createFeatureInfo(String featureId, String featureName, ValueType.Enum valueType) { - FeatureSpec fs = - FeatureSpec.newBuilder() - .setId(featureId) - .setName(featureName) - .setValueType(valueType) - .build(); - - EntitySpec entitySpec = EntitySpec.newBuilder().setName(featureId.split("\\.")[0]).build(); - EntityInfo entityInfo = new EntityInfo(entitySpec); - return new FeatureInfo(fs, entityInfo, null); - } -} diff --git a/core/src/test/java/feast/core/training/BigQueryTraningDatasetCreatorTest.java b/core/src/test/java/feast/core/training/BigQueryTraningDatasetCreatorTest.java deleted file mode 100644 index 5cbaa1017d2..00000000000 --- a/core/src/test/java/feast/core/training/BigQueryTraningDatasetCreatorTest.java +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ -package feast.core.training; - -import com.google.cloud.bigquery.BigQuery; -import com.google.protobuf.Timestamp; -import com.google.protobuf.util.Timestamps; -import feast.core.DatasetServiceProto.DatasetInfo; -import feast.core.DatasetServiceProto.FeatureSet; -import feast.core.storage.BigQueryStorageManager; -import feast.core.util.UuidProvider; -import feast.specs.StorageSpecProto.StorageSpec; -import java.time.Instant; -import java.util.Arrays; -import java.util.Collections; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; - -import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyLong; -import static org.mockito.ArgumentMatchers.anyMap; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -// TODO: Should consider testing with "actual" BigQuery vs mocking it -// because the mocked BigQuery client is very basic and may miss important functionalities -// such as an actual table / dataset is actually created -// In the test method, should probably add a condition so that tests can be skipped if -// the user running the tests do not have permission to manage BigQuery (although ideally they should have) -// Example of adding the condition whether or not to accept the test result as valid: -// https://stackoverflow.com/questions/1689242/conditionally-ignoring-tests-in-junit-4 - -public class BigQueryTraningDatasetCreatorTest { - - public static final String projectId = "the-project"; - public static final String datasetPrefix = "feast"; - // class under test - private BigQueryTraningDatasetCreator creator; - @Mock - private BigQueryDatasetTemplater templater; - @Mock - private BigQuery bq; - @Mock - private UuidProvider uuidProvider; - - @Before - public void setUp() { - MockitoAnnotations.initMocks(this); - when(templater.getStorageSpec()).thenReturn(StorageSpec.newBuilder() - .setId("BIGQUERY1") - .setType(BigQueryStorageManager.TYPE) - .putOptions("project", "project") - .putOptions("dataset", "dataset") - .build()); - creator = new BigQueryTraningDatasetCreator(templater, projectId, datasetPrefix, uuidProvider, bq); - - when(uuidProvider.getUuid()).thenReturn("b0009f0f7df634ddc130571319e0deb9742eb1da"); - when(templater.createQuery( - any(FeatureSet.class), any(Timestamp.class), any(Timestamp.class), anyLong(), anyMap())) - .thenReturn("SELECT * FROM `project.dataset.table`"); - } - - @Test - public void shouldCreateCorrectDatasetIfPrefixNotSpecified() { - String entityName = "myentity"; - - FeatureSet featureSet = - FeatureSet.newBuilder() - .setEntityName(entityName) - .addAllFeatureIds(Arrays.asList("myentity.feature1", "myentity.feature2")) - .build(); - - Timestamp startDate = - Timestamps.fromSeconds(Instant.parse("2018-01-01T10:15:30.00Z").getEpochSecond()); - Timestamp endDate = - Timestamps.fromSeconds(Instant.parse("2019-01-01T10:15:30.00Z").getEpochSecond()); - long limit = 999; - String namePrefix = ""; - - DatasetInfo dsInfo = creator.createDataset(featureSet, startDate, endDate, limit, namePrefix, Collections - .emptyMap()); - assertThat( - dsInfo.getName(), equalTo("feast_myentity_b0009f0f7df634ddc130571319e0deb9742eb1da")); - assertThat( - dsInfo.getTableUrl(), - equalTo( - String.format( - "%s.dataset.%s_%s_%s", - projectId, datasetPrefix, entityName, "b0009f0f7df634ddc130571319e0deb9742eb1da"))); - } - - @Test - public void 
shouldCreateCorrectDatasetIfPrefixIsSpecified() { - String entityName = "myentity"; - - FeatureSet featureSet = - FeatureSet.newBuilder() - .setEntityName(entityName) - .addAllFeatureIds(Arrays.asList("myentity.feature1", "myentity.feature2")) - .build(); - - Timestamp startDate = - Timestamps.fromSeconds(Instant.parse("2018-01-01T10:15:30.00Z").getEpochSecond()); - Timestamp endDate = - Timestamps.fromSeconds(Instant.parse("2019-01-01T10:15:30.00Z").getEpochSecond()); - long limit = 999; - String namePrefix = "mydataset"; - - DatasetInfo dsInfo = creator.createDataset(featureSet, startDate, endDate, limit, namePrefix, Collections.emptyMap()); - assertThat( - dsInfo.getTableUrl(), - equalTo( - String.format( - "%s.dataset.%s_%s_%s_%s", - projectId, - datasetPrefix, - entityName, - namePrefix, - "b0009f0f7df634ddc130571319e0deb9742eb1da"))); - assertThat( - dsInfo.getName(), - equalTo("feast_myentity_mydataset_b0009f0f7df634ddc130571319e0deb9742eb1da")); - } - - @Test - public void shouldPassArgumentToTemplater() { - FeatureSet featureSet = - FeatureSet.newBuilder() - .setEntityName("myentity") - .addAllFeatureIds(Arrays.asList("myentity.feature1", "myentity.feature2")) - .build(); - - Timestamp startDate = Timestamps.fromSeconds(0); - Timestamp endDate = Timestamps.fromSeconds(1000); - long limit = 999; - String namePrefix = ""; - - creator.createDataset(featureSet, startDate, endDate, limit, namePrefix, Collections.emptyMap()); - - verify(templater).createQuery(featureSet, startDate, endDate, limit, Collections.emptyMap()); - } -} diff --git a/core/src/test/java/feast/core/util/PathUtilTest.java b/core/src/test/java/feast/core/util/PathUtilTest.java deleted file mode 100644 index 1a8f6196903..00000000000 --- a/core/src/test/java/feast/core/util/PathUtilTest.java +++ /dev/null @@ -1,19 +0,0 @@ -package feast.core.util; - -import static org.junit.Assert.assertEquals; - -import org.junit.Test; - -public class PathUtilTest { - - @Test - public void testGetPath() { - assertEquals("file:///tmp/foo/bar", PathUtil.getPath("/tmp/foo/bar").toUri().toString()); - } - - @Test - public void testGetPath_withGcs() { - - assertEquals("gs://tmp/foo/bar", PathUtil.getPath("gs://tmp/foo/bar").toUri().toString()); - } -} \ No newline at end of file diff --git a/core/src/test/java/feast/core/util/TypeConversionTest.java b/core/src/test/java/feast/core/util/TypeConversionTest.java index 97634665d07..decd26514d8 100644 --- a/core/src/test/java/feast/core/util/TypeConversionTest.java +++ b/core/src/test/java/feast/core/util/TypeConversionTest.java @@ -25,6 +25,7 @@ import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThat; +import static org.junit.Assert.assertTrue; public class TypeConversionTest { @Test @@ -69,4 +70,16 @@ public void convertMapToJsonStringShouldReturnJsonStringForGivenMap() { input.put("key", "value"); assertThat(TypeConversion.convertMapToJsonString(input), hasJsonPath("$.key", equalTo("value"))); } + + @Test + public void convertJsonStringToArgsShouldReturnCorrectListOfArgs() { + Map input = new HashMap<>(); + input.put("key", "value"); + input.put("key2", "value2"); + + String[] expected = new String[]{"--key=value", "--key2=value2"}; + String[] actual = TypeConversion.convertMapToArgs(input); + assertThat(actual.length, equalTo(expected.length)); + assertTrue(Arrays.asList(actual).containsAll(Arrays.asList(expected))); + } } diff --git 
a/core/src/test/java/feast/core/validators/SpecValidatorTest.java b/core/src/test/java/feast/core/validators/SpecValidatorTest.java deleted file mode 100644 index 2de054486ed..00000000000 --- a/core/src/test/java/feast/core/validators/SpecValidatorTest.java +++ /dev/null @@ -1,638 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.core.validators; - -import static feast.core.config.StorageConfig.DEFAULT_ERRORS_ID; -import static feast.core.config.StorageConfig.DEFAULT_SERVING_ID; -import static feast.core.config.StorageConfig.DEFAULT_WAREHOUSE_ID; -import static org.mockito.Mockito.when; - -import feast.core.dao.EntityInfoRepository; -import feast.core.dao.FeatureGroupInfoRepository; -import feast.core.dao.FeatureInfoRepository; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureGroupSpecProto.FeatureGroupSpec; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.ImportSpecProto.Field; -import feast.specs.ImportSpecProto.ImportSpec; -import feast.specs.ImportSpecProto.Schema; -import feast.specs.StorageSpecProto.StorageSpec; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.mockito.Mockito; - -public class SpecValidatorTest { - - @Rule - public final ExpectedException exception = ExpectedException.none(); - private FeatureInfoRepository featureInfoRepository; - private FeatureGroupInfoRepository featureGroupInfoRepository; - private EntityInfoRepository entityInfoRepository; - - @Before - public void setUp() { - featureInfoRepository = Mockito.mock(FeatureInfoRepository.class); - featureGroupInfoRepository = Mockito.mock(FeatureGroupInfoRepository.class); - entityInfoRepository = Mockito.mock(EntityInfoRepository.class); - } - - @Test - public void featureSpecWithoutIdShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - FeatureSpec input = FeatureSpec.newBuilder().build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage("Id field cannot be empty"); - validator.validateFeatureSpec(input); - } - - @Test - public void featureSpecWithoutNameShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - FeatureSpec input = FeatureSpec.newBuilder().setId("aa").build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage("Name field cannot be empty"); - validator.validateFeatureSpec(input); - } - - @Test - public void featureSpecWithInvalidNameShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - FeatureSpec input = FeatureSpec.newBuilder().setId("test").setName("hello there!").build(); - 
exception.expect(IllegalArgumentException.class); - exception.expectMessage( - "Validation for feature spec with id test failed: invalid value for " - + "field Name: argument must be in lower snake case, and cannot include any special characters."); - validator.validateFeatureSpec(input); - } - - @Test - public void featureSpecWithoutOwnerShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - FeatureSpec input = FeatureSpec.newBuilder().setId("id").setName("name").build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage("Owner field cannot be empty"); - validator.validateFeatureSpec(input); - } - - @Test - public void featureSpecWithoutDescriptionShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - FeatureSpec input = - FeatureSpec.newBuilder().setId("id").setName("name").setOwner("owner").build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage("Description field cannot be empty"); - validator.validateFeatureSpec(input); - } - - @Test - public void featureSpecWithoutEntityShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - FeatureSpec input = - FeatureSpec.newBuilder() - .setId("id") - .setName("name") - .setOwner("owner") - .setDescription("dasdad") - .build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage("Entity field cannot be empty"); - validator.validateFeatureSpec(input); - } - - @Test - public void featureSpecWithIdWithoutThreeWordsShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - FeatureSpec input = - FeatureSpec.newBuilder() - .setId("id") - .setName("name") - .setOwner("owner") - .setDescription("dasdad") - .setEntity("entity") - .build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage("Id must contain entity, name"); - validator.validateFeatureSpec(input); - } - - @Test - public void featureSpecWithIdWithoutMatchingEntityShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - FeatureSpec input = - FeatureSpec.newBuilder() - .setId("notentity.name") - .setName("name") - .setOwner("owner") - .setDescription("dasdad") - .setEntity("entity") - .build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage( - "Id must be in format entity.name, " - + "entity in Id does not match entity provided."); - validator.validateFeatureSpec(input); - } - - @Test - public void featureSpecWithIdWithoutMatchingNameShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - FeatureSpec input = - FeatureSpec.newBuilder() - .setId("entity.notname") - .setName("name") - .setOwner("owner") - .setDescription("dasdad") - .setEntity("entity") - .build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage( - "Id must be in format entity.name, " - + "name in Id does not match name provided."); - validator.validateFeatureSpec(input); - } - - @Test 
- public void featureSpecWithoutExistingEntityShouldThrowIllegalArgumentException() { - when(entityInfoRepository.existsById("entity")).thenReturn(false); - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - FeatureSpec input = - FeatureSpec.newBuilder() - .setId("entity.name") - .setName("name") - .setOwner("owner") - .setDescription("dasdad") - .setEntity("entity") - .build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage("Entity with name entity does not exist"); - validator.validateFeatureSpec(input); - } - - @Test - public void featureSpecWithInvalidFeatureGroupShouldThrowIllegalArgumentException() { - when(entityInfoRepository.existsById("entity")).thenReturn(true); - when(featureGroupInfoRepository.existsById("group")).thenReturn(false); - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - FeatureSpec input = - FeatureSpec.newBuilder() - .setId("entity.name") - .setName("name") - .setOwner("owner") - .setDescription("dasdad") - .setEntity("entity") - .setGroup("group") - .build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage("Group with id group does not exist"); - validator.validateFeatureSpec(input); - } - - - @Test - public void featureGroupSpecWithoutIdShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - FeatureGroupSpec input = FeatureGroupSpec.newBuilder().build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage("Id field cannot be empty"); - validator.validateFeatureGroupSpec(input); - } - - @Test - public void featureGroupSpecWithoutValidIdShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - FeatureGroupSpec input = FeatureGroupSpec.newBuilder().setId("NOT_VALID").build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage( - "invalid value for " - + "field Id: argument must be in lower snake case, and cannot include any special characters."); - validator.validateFeatureGroupSpec(input); - } - - @Test - public void entitySpecWithoutNameShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - EntitySpec input = EntitySpec.newBuilder().build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage("Name field cannot be empty"); - validator.validateEntitySpec(input); - } - - @Test - public void entitySpecWithInvalidNameShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - EntitySpec input = EntitySpec.newBuilder().setName("INVALID NAME!").build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage( - "Validation for entity spec with name INVALID NAME! 
failed:" - + " invalid value for field Name: argument must be in lower snake case, and cannot include " - + "any special characters."); - validator.validateEntitySpec(input); - } - - @Test - public void testServingStorageSpec_withValidTypes() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - validator.validateServingStorageSpec(StorageSpec.newBuilder().setId(DEFAULT_SERVING_ID) - .setType("redis").build()); - validator.validateServingStorageSpec(StorageSpec.newBuilder().setId(DEFAULT_SERVING_ID) - .setType("bigtable").build()); - } - - @Test(expected = IllegalArgumentException.class) - public void testServingStorageSpec_withInvalidType() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - validator.validateServingStorageSpec(StorageSpec.newBuilder().setId(DEFAULT_SERVING_ID) - .setType("invalid").build()); - } - - @Test - public void testWarehouseStorageSpec_withValidTypes() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - validator.validateWarehouseStorageSpec(StorageSpec.newBuilder().setId(DEFAULT_WAREHOUSE_ID) - .setType("file.json").build()); - validator.validateWarehouseStorageSpec(StorageSpec.newBuilder().setId(DEFAULT_WAREHOUSE_ID) - .setType("bigquery").build()); - } - - @Test(expected = IllegalArgumentException.class) - public void testWarehouseStorageSpec_withInvalidType() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - validator.validateWarehouseStorageSpec(StorageSpec.newBuilder().setId(DEFAULT_WAREHOUSE_ID) - .setType("invalid").build()); - } - - @Test - public void testErrorsStorageSpec_withValidTypes() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - validator.validateErrorsStorageSpec(StorageSpec.newBuilder().setId(DEFAULT_ERRORS_ID) - .setType("file.json").build()); - validator.validateErrorsStorageSpec(StorageSpec.newBuilder().setId(DEFAULT_ERRORS_ID) - .setType("stderr").build()); - validator.validateErrorsStorageSpec(StorageSpec.newBuilder().setId(DEFAULT_ERRORS_ID) - .setType("stderr").build()); - } - - @Test(expected = IllegalArgumentException.class) - public void testErrorsStorageSpec_withInvalidType() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - validator.validateErrorsStorageSpec(StorageSpec.newBuilder().setId(DEFAULT_ERRORS_ID) - .setType("invalid").build()); - } - - - @Test - public void storageSpecWithoutIdShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - StorageSpec input = StorageSpec.newBuilder().build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage("Id field cannot be empty"); - validator.validateStorageSpec(input); - } - - @Test - public void importSpecWithInvalidTypeShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - ImportSpec input = ImportSpec.newBuilder().setType("blah").build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage("Validation for import spec 
failed: Type blah not supported"); - validator.validateImportSpec(input); - } - - @Test - public void pubsubImportSpecWithoutTopicOrSubscriptionShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - ImportSpec input = ImportSpec.newBuilder().setType("pubsub").build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage( - "Validation for import spec failed: Invalid options: Pubsub ingestion requires either topic or subscription"); - validator.validateImportSpec(input); - } - - @Test - public void fileImportSpecWithoutSupportedFileFormatShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - ImportSpec input = - ImportSpec.newBuilder().setType("file.wat?").build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage( - "Validation for import spec failed: Type file.wat? not supported"); - validator.validateImportSpec(input); - } - - @Test - public void fileImportSpecWithoutValidPathShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - ImportSpec input = ImportSpec.newBuilder().setType("file.csv").build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage( - "Validation for import spec failed: Invalid options: File path cannot be empty"); - validator.validateImportSpec(input); - } - - @Test - public void fileImportSpecWithoutEntityIdColumnInSchemaShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - ImportSpec input = - ImportSpec.newBuilder() - .setType("file.csv") - .putSourceOptions("path", "gs://asdasd") - .build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage( - "Validation for import spec failed: entityId column must be specified in schema"); - validator.validateImportSpec(input); - } - - @Test - public void bigQueryImportSpecWithoutEntityIdColumnInSchemaShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - ImportSpec input = - ImportSpec.newBuilder() - .setType("bigquery") - .putSourceOptions("project", "my-google-project") - .putSourceOptions("dataset", "feast") - .putSourceOptions("table", "feast") - .build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage( - "Validation for import spec failed: entityId column must be specified in schema"); - validator.validateImportSpec(input); - } - - @Test - public void importSpecWithoutValidEntityShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - - ImportSpec input = - ImportSpec.newBuilder() - .setType("pubsub") - .putSourceOptions("topic", "my/pubsub/topic") - .addEntities("someEntity") - .build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage("Validation for import spec failed: Entity someEntity not registered"); - validator.validateImportSpec(input); - } - - @Test - public void importSpecWithUnregisteredFeaturesShouldThrowIllegalArgumentException() { - SpecValidator validator = - 
new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - when(featureInfoRepository.existsById("some_existing_feature")).thenReturn(true); - Schema schema = - Schema.newBuilder() - .addFields(Field.newBuilder().setFeatureId("some_existing_feature").build()) - .addFields(Field.newBuilder().setFeatureId("some_nonexistent_feature").build()) - .build(); - ImportSpec input = - ImportSpec.newBuilder() - .setType("pubsub") - .putSourceOptions("topic", "my/pubsub/topic") - .setSchema(schema) - .addEntities("someEntity") - .build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage( - "Validation for import spec failed: Feature some_nonexistent_feature not registered"); - validator.validateImportSpec(input); - } - - @Test - public void importSpecWithKafkaSourceAndCorrectOptionsShouldPassValidation() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - when(featureInfoRepository.existsById("some_existing_feature")).thenReturn(true); - when(entityInfoRepository.existsById("someEntity")).thenReturn(true); - Schema schema = - Schema.newBuilder() - .addFields(Field.newBuilder().setFeatureId("some_existing_feature").build()) - .build(); - ImportSpec input = - ImportSpec.newBuilder() - .setType("kafka") - .putSourceOptions("topics", "my-kafka-topic") - .putSourceOptions("server", "localhost:54321") - .setSchema(schema) - .addEntities("someEntity") - .build(); - validator.validateImportSpec(input); - } - - @Test - public void importSpecWithCoalesceJobOptionsShouldPassValidation() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - when(featureInfoRepository.existsById("some_existing_feature")).thenReturn(true); - when(entityInfoRepository.existsById("someEntity")).thenReturn(true); - Schema schema = - Schema.newBuilder() - .addFields(Field.newBuilder().setFeatureId("some_existing_feature").build()) - .build(); - ImportSpec input = - ImportSpec.newBuilder() - .setType("kafka") - .putSourceOptions("topics", "my-kafka-topic") - .putSourceOptions("server", "localhost:54321") - .putJobOptions("coalesceRows.enabled", "true") - .putJobOptions("coalesceRows.delaySeconds", "10000") - .putJobOptions("coalesceRows.timeoutSeconds", "20000") - .putJobOptions("sample.limit", "1000") - .setSchema(schema) - .addEntities("someEntity") - .build(); - validator.validateImportSpec(input); - } - - @Test - public void importSpecWithLimitJobOptionsShouldPassValidation() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - featureInfoRepository); - when(featureInfoRepository.existsById("some_existing_feature")).thenReturn(true); - when(entityInfoRepository.existsById("someEntity")).thenReturn(true); - Schema schema = - Schema.newBuilder() - .addFields(Field.newBuilder().setFeatureId("some_existing_feature").build()) - .build(); - ImportSpec input = - ImportSpec.newBuilder() - .setType("kafka") - .putSourceOptions("topics", "my-kafka-topic") - .putSourceOptions("server", "localhost:54321") - .putJobOptions("sample.limit", "1000") - .setSchema(schema) - .addEntities("someEntity") - .build(); - validator.validateImportSpec(input); - } - - @Test - public void importSpecWithKafkaSourceWithoutOptionsShouldThrowIllegalArgumentException() { - SpecValidator validator = - new SpecValidator( - entityInfoRepository, - featureGroupInfoRepository, - 
featureInfoRepository); - when(featureInfoRepository.existsById("some_existing_feature")).thenReturn(true); - when(entityInfoRepository.existsById("someEntity")).thenReturn(true); - Schema schema = - Schema.newBuilder() - .addFields(Field.newBuilder().setFeatureId("some_existing_feature").build()) - .build(); - ImportSpec input = - ImportSpec.newBuilder() - .setType("kafka") - .setSchema(schema) - .addEntities("someEntity") - .build(); - exception.expect(IllegalArgumentException.class); - exception.expectMessage( - "Validation for import spec failed: Invalid options: Kafka ingestion requires either topics or servers"); - validator.validateImportSpec(input); - } -} diff --git a/docs/.gitbook/assets/basic-architecture-diagram.svg b/docs/.gitbook/assets/basic-architecture-diagram.svg new file mode 100644 index 00000000000..b707f490461 --- /dev/null +++ b/docs/.gitbook/assets/basic-architecture-diagram.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-docs-overview-diagram-1.png b/docs/.gitbook/assets/feast-docs-overview-diagram-1.png new file mode 100644 index 00000000000..d8f040f6eb6 Binary files /dev/null and b/docs/.gitbook/assets/feast-docs-overview-diagram-1.png differ diff --git a/docs/.gitbook/assets/feast-docs-overview-diagram-1.svg b/docs/.gitbook/assets/feast-docs-overview-diagram-1.svg new file mode 100644 index 00000000000..eb52accd911 --- /dev/null +++ b/docs/.gitbook/assets/feast-docs-overview-diagram-1.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-docs-overview-diagram-2.svg b/docs/.gitbook/assets/feast-docs-overview-diagram-2.svg new file mode 100644 index 00000000000..7f30963ec78 --- /dev/null +++ b/docs/.gitbook/assets/feast-docs-overview-diagram-2.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-docs-overview-diagram.png b/docs/.gitbook/assets/feast-docs-overview-diagram.png new file mode 100644 index 00000000000..8856d4c0c75 Binary files /dev/null and b/docs/.gitbook/assets/feast-docs-overview-diagram.png differ diff --git a/docs/.gitbook/assets/feast-docs-overview-diagram.svg b/docs/.gitbook/assets/feast-docs-overview-diagram.svg new file mode 100644 index 00000000000..e06e440c5dc --- /dev/null +++ b/docs/.gitbook/assets/feast-docs-overview-diagram.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-hierarchy.png b/docs/.gitbook/assets/feast-hierarchy.png new file mode 100644 index 00000000000..ed146f7e6f0 Binary files /dev/null and b/docs/.gitbook/assets/feast-hierarchy.png differ diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 00000000000..ee27a2b7e56 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,39 @@ +# Overview + +Feast \(**Fea**ture **St**ore\) is a tool for managing and serving machine learning features. + +> Feast is the bridge between your models and your data + +Feast aims to: + +* Provide a unified means of managing feature data from a single person to large enterprises. +* Provide scalable and performant access to feature data when training and serving models. +* Provide consistent and point-in-time correct access to feature data. +* Enable discovery, documentation, and insights into your features. + +![](.gitbook/assets/feast-docs-overview-diagram-2.svg) + +**TL;DR:** Feast decouples feature engineering from feature usage. Features that are added to Feast become available immediately for training and serving. 
Models can retrieve the same features used in training from a low latency online store in production. + +This means that new ML projects start with a process of feature selection from a catalog instead of having to do feature engineering from scratch. + +```python +# Setting things up +fs = feast.Client('feast.example.com') +customer_ids = ['1001', '1002', '1003'] +customer_features = ['CreditScore', 'Balance', 'Age', 'NumOfProducts', 'IsActive'] +from_date = '2019-01-01' +to_date = '2019-12-31' + +# Training your model (typically from a notebook or pipeline) +data = fs.get_batch_features(customer_features, customer_ids, from_date, to_date) +my_model = ml.fit(data.to_train(), data.to_train()) + +# Serving predictions (when serving the model in production) +prediction = my_model.predict(fs.get_online_features(customer_features, customer_ids)) +``` + +The code above is for illustrative purposes. Please see our getting started guide for more realistic examples. + +For more reasons to use Feast, please see [Why Feast?](why-feast.md#why-feast) + diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md new file mode 100644 index 00000000000..8dfc629d4db --- /dev/null +++ b/docs/SUMMARY.md @@ -0,0 +1,15 @@ +# Table of contents + +* [Overview](README.md) +* [Why Feast?](why-feast.md) +* [Concepts](concepts.md) +* [Getting Help](community.md) + +## Getting Started + +* [Install Feast](getting-started/install-feast.md) + +## Reference + +* [Core API](reference/proto.md) + diff --git a/docs/api/README.md b/docs/api/README.md new file mode 100644 index 00000000000..47bb08a045e --- /dev/null +++ b/docs/api/README.md @@ -0,0 +1,2 @@ +# API + diff --git a/docs/architecture.png b/docs/assets/arch.png similarity index 100% rename from docs/architecture.png rename to docs/assets/arch.png diff --git a/docs/assets/feast-components-overview.png b/docs/assets/feast-components-overview.png new file mode 100644 index 00000000000..1f69bb7ed8e Binary files /dev/null and b/docs/assets/feast-components-overview.png differ diff --git a/docs/assets/protoc-gen-doc-markdown.tmpl b/docs/assets/protoc-gen-doc-markdown.tmpl new file mode 100644 index 00000000000..347335c9a4c --- /dev/null +++ b/docs/assets/protoc-gen-doc-markdown.tmpl @@ -0,0 +1,98 @@ +# Protocol Documentation + + +## Table of Contents +{{range .Files}} +{{$file_name := .Name}}- [{{.Name | replace "/" "." }}](#{{ mdfriendly .Name}}) + {{range .Services}} - [{{.Name}}](#{{ mdfriendly .Name}}) + {{end}} + {{range .Messages}} - [{{.LongName}}](#{{ mdfriendly .Name}}) + {{end}} + {{range .Enums}} - [{{.LongName}}](#{{ mdfriendly .Name}}) + {{end}} + {{range .Extensions}} - [File-level Extensions](#{{ mdfriendly $file_name}}-extensions) + {{end}} +{{end}} +- [Scalar Value Types](#scalar-value-types) + +{{range .Files}} +{{$file_name := .Name}} + +

+<p align="right"><a href="#top">Top</a></p>

+ +## {{.Name | replace "/" "."}} +{{.Description}} + +{{range .Services}} + + +### {{.Name}} +{{.Description}} + +| Method Name | Request Type | Response Type | Description | +| ----------- | ------------ | ------------- | ------------| +{{range .Methods -}} + | {{.Name}} | [{{.RequestLongType}}](#{{.RequestLongType}}){{if .RequestStreaming}} stream{{end}} | [{{.ResponseLongType}}](#{{.ResponseLongType}}){{if .ResponseStreaming}} stream{{end}} | {{nobr .Description}} | +{{end}} +{{end}} + +{{range .Messages}} + + +### {{.LongName}} +{{.Description}} + +{{if .HasFields}} +| Field | Type | Label | Description | +| ----- | ---- | ----- | ----------- | +{{range .Fields -}} + | {{.Name}} | [{{.LongType}}](#{{.FullType}}) | {{.Label}} | {{nobr .Description}}{{if .DefaultValue}} Default: {{.DefaultValue}}{{end}} | +{{end}} +{{end}} + +{{if .HasExtensions}} +| Extension | Type | Base | Number | Description | +| --------- | ---- | ---- | ------ | ----------- | +{{range .Extensions -}} + | {{.Name}} | {{.LongType}} | {{.ContainingLongType}} | {{.Number}} | {{nobr .Description}}{{if .DefaultValue}} Default: {{.DefaultValue}}{{end}} | +{{end}} +{{end}} + +{{end}} + +{{range .Enums}} + + +### {{.LongName}} +{{.Description}} + +| Name | Number | Description | +| ---- | ------ | ----------- | +{{range .Values -}} + | {{.Name}} | {{.Number}} | {{nobr .Description}} | +{{end}} + +{{end}} + +{{if .HasExtensions}} + + +### File-level Extensions +| Extension | Type | Base | Number | Description | +| --------- | ---- | ---- | ------ | ----------- | +{{range .Extensions -}} + | {{.Name}} | {{.LongType}} | {{.ContainingLongType}} | {{.Number}} | {{nobr .Description}}{{if .DefaultValue}} Default: `{{.DefaultValue}}`{{end}} | +{{end}} +{{end}} + + + +{{end}} + +## Scalar Value Types + +| .proto Type | Notes | C++ Type | Java Type | Python Type | +| ----------- | ----- | -------- | --------- | ----------- | +{{range .Scalars -}} + | {{.ProtoType}} | {{.Notes}} | {{.CppType}} | {{.JavaType}} | {{.PythonType}} | +{{end}} \ No newline at end of file diff --git a/docs/community.md b/docs/community.md new file mode 100644 index 00000000000..f0ee257bdac --- /dev/null +++ b/docs/community.md @@ -0,0 +1,28 @@ +# Getting Help + +## Chat + +* Join our [Slack](https://join.slack.com/t/feast-dev/shared_invite/enQtODA1NTc4NTc4NDE4LWUyYjdiYjUyMGNkZWQzY2IxMjI0NDUwNzlmNjM3YzRkZTZiZGM4ZTJmNmU5ZTVkZDI5OTA3YzA5ZDNlMDJjMmE) channel if you want to chat to catch up on all things Feast! + +## GitHub + +* Feast GitHub Repo can be [found here](https://github.com/gojek/feast/). +* Found a bug or need a feature? [Create an issue on GitHub](https://github.com/gojek/feast/issues/new) + +## Mailing list + +#### Feast discussion + +* Google Group: [https://groups.google.com/d/forum/feast-discuss](https://groups.google.com/d/forum/feast-discuss + ) +* Mailing List: [feast-discuss@googlegroups.com](mailto:feast-discuss@googlegroups.com) + +#### Feast development + +* Google Group: [https://groups.google.com/d/forum/feast-dev](https://groups.google.com/d/forum/feast-dev) +* Mailing List: [feast-dev@googlegroups.com](mailto:feast-dev@googlegroups.com) + +### + + + diff --git a/docs/components.md b/docs/components.md deleted file mode 100644 index f242eee5938..00000000000 --- a/docs/components.md +++ /dev/null @@ -1,35 +0,0 @@ - -# Components - -### Core - -Feast Core is the central component that manages Feast and all other components within the system. 
It allows for the registration and management of entities, features, data stores, and other system resources. Core also manages the execution of feature ingestion jobs from batch and streaming sources, and provides the other Feast components with feature related information. - -### Stores - -Feast maintains data stores for the purposes of model training and serving features to models in production. Features are loaded into these stores by ingestion jobs from both streaming and batch sources. - -Two kinds of data stores are supported: - -Warehouse: The feature warehouse maintains all historical feature data. The warehouse can be queried for batch datasets which are then used for model training. - -Supported warehouse: __BigQuery__ - -Serving: Feast supports multiple serving stores which maintain feature values for access in a production serving environment. - -Supported serving stores: __Redis__, __Bigtable__ - -### Serving - -Feast Serving is an API used for for the retrieval of feature values by models in production. It allows for low latency and high throughput access to feature values from serving stores using Feast client libraries. The API abstracts away data access, allowing users to simultaneously query from multiple stores with a single gRPC or HTTP request. - -### Client Libraries - -Feast provides multiple client libraries for interacting with a Feast deployment. - -| Functionality | CLI | Go | Java | Python (WIP)| -|------------------------------|-----|-----|------|-------------| -| Feature Management | yes | no | no | yes | -| Data Ingestion (Jobs) | yes | no | no | yes | -| Feature Retrieval (Training) | no | no | no | yes | -| Feature Retrieval (Serving) | no | yes | yes | yes | diff --git a/docs/concepts.md b/docs/concepts.md index a16455bd0f2..1ec2639c7e3 100644 --- a/docs/concepts.md +++ b/docs/concepts.md @@ -1,30 +1,124 @@ # Concepts -### What is Feast? -Feast is a Feature Storage platform for Machine Learning features with the following attributes: +## Architecture -1. Ingestion and storage of ML features via batch or stream -2. Retrieval of ML features for serving via API, or via Google BigQuery to create training datasets -3. Maintaining of a feature catalog, including additional feature attribute information and discovery via API +![Logical diagram of a typical Feast deployment](.gitbook/assets/basic-architecture-diagram.svg) -Feast solves a need for standardising how features are stored, served and accessed, and encourages sharing and reuse of created features amongst data science teams. +The core components of a Feast deployment are -Feast does not prescribe how Features should be created. It allows for ingestion via batch or stream in a number of formats, e.g. batch import from CSV, BigQuery tables, streaming via Pub/Sub etc. +* **Feast Core:** Feast Core is a centralized service that acts as the authority on features within an organization. Typically there is only one "Core" deployment per organization, with all feature management happening through it. +* **Feast Ingestion Jobs:** Feast ingestion jobs retrieve feature data from user defined data sources and populate serving stores with this feature data. These jobs are managed by Feast Core. Data can either be sources from existing sources \(like [Kafka](https://kafka.apache.org/)\), or it can be loaded into Feast through its API. +* **Feast Serving:** Feast Serving is the data access layer through which end users and production systems retrieve feature data. Each Serving store is backed by one or more databases. 
These databases are updated by the Feast ingestion jobs. There are two types of stores: batch and online. Batch stores hold large volumes of historical data, while online stores only hold the latest feature values. +## Data Model -### What is a Feature? +### Feature Set -A Feature is an individual measurable property or characteristic of an Entity. In the context of Feast a Feature has the following attributes: +User data is typically in the form of dataframes, tables in data warehouses, or events on a stream. These data sources are loaded into Feast in order to serve features for model training or serving. -* Entity - It must be associated with a known Entity within Feast -* ValueType - The feature type must be defined, e.g. String, Bytes, Int64, Int32, Float etc. -* Requirements - Properties related to how a feature should be stored for serving and training -* StorageType - For both serving and training a storage type must be defined +Feature sets allow for groups of fields in these data sources to be ingested and stored together. This allows for efficient storage and logical namespacing of data. -Feast needs to know these attributes in order to be able to ingest, store and serve a feature. A Feature is only a feature when Feast knows about it; This seems contrite, but it introduces a best practice whereby a feature only becomes available for ingestion, serving and training in production when Feast has added the feature to its catalog. +When data is loaded from these sources, each field in the feature set must be found in every record of the data source. Fields from these data sources must be either a timestamp, an entity, or a feature. -### What is an Entity? +{% hint style="info" %} +Feature sets are a grouping of features based on how they are loaded into Feast. They ensure that data is efficiently stored during ingestion. Feature sets are not a grouping of features for retrieval. During retrieval it is possible to retrieve feature values from any number of feature sets. +{% endhint %} -An entity is a type with an associated key which generally maps onto a known domain object, e.g. Driver, Customer, Area, Merchant etc. An entity can also be a composite of other entities, with the corresponding composite key, e.g. DriverArea.
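For readers who prefer to stay in Python rather than YAML, a minimal sketch of defining an equivalent feature set programmatically is shown here. Only the `ingest(dataframe=...)` call mirrors the YAML-based example that follows; the top-level `Entity`, `Feature`, and `ValueType` constructors and import paths are assumptions about the SDK and may differ between versions.

```python
import pandas as pd

# Assumed imports; exact module paths may vary across Feast SDK versions.
from feast import Entity, Feature, FeatureSet, ValueType

# Hypothetical programmatic equivalent of the customer_transactions YAML definition.
customer_transactions = FeatureSet(
    name="customer_transactions",
    entities=[Entity(name="customer_id", dtype=ValueType.INT64)],
    features=[
        Feature(name="daily_transactions", dtype=ValueType.FLOAT),
        Feature(name="total_transactions", dtype=ValueType.FLOAT),
    ],
)

# Ingestion then works the same way as in the YAML-based example below.
customer_df = pd.read_csv("customer_data.csv")
customer_transactions.ingest(dataframe=customer_df)
```

Whichever route is used should yield an equivalent feature set definition; the YAML form is often easier to keep in version control, while the programmatic form is convenient inside notebooks.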
+#### Customer Transactions Example + +Below is an example of a basic `customer transactions` feature set that has been exported to YAML: + +{% tabs %} +{% tab title="customer\_transactions\_feature\_set.yaml" %} +```yaml +name: customer_transactions +kind: feature_set +entities: +- name: customer_id + valueType: INT64 +features: +- name: daily_transactions + valueType: FLOAT +- name: total_transactions + valueType: FLOAT + maxAge: 3600s +``` +{% endtab %} +{% endtabs %} + +The dataframe below \(`customer_data.csv`\) contains the features and entities of the above feature set + +| datetime | customer\_id | daily\_transactions | total\_tra**nsactions** | +| :--- | :--- | :--- | :--- | +| 2019-01-01 01:00:00 | 20001 | 5.0 | 14.0 | +| 2019-01-01 01:00:00 | 20002 | 2.6 | 43.0 | +| 2019-01-01 01:00:00 | 20003 | 4.1 | 154.0 | +| 2019-01-01 01:00:00 | 20004 | 3.4 | 74.0 | + +In order to ingest feature data into Feast for this specific feature set: + +```python +# Load dataframe +customer_df = pd.read_csv("customer_data.csv") + +# Create feature set from YAML (using YAML is optional) +cust_trans_fs = FeatureSet.from_yaml("customer_transactions_feature_set.yaml") + +# Load feature data into Feast for this specific feature set +cust_trans_fs.ingest(dataframe=customer_data) +``` + +### Feature + +A feature is an individual measurable property or characteristic of a phenomenon being observed. Features are the most important concepts within a feature store. Feature data is used both as input to models during training and when models are served in production. + +In the context of Feast, features are values that are associated with either one or more entities over time. In Feast, these values are either primitives or lists of primitives. Each feature can also have additional information attached to it. For example whether it is a categorical feature or numerical. + +{% hint style="info" %} +Features in Feast are defined within Feature Sets and are not treated as standalone concepts. +{% endhint %} + +### Entity + +An entity type is any object in an organization that needs to be modeled and on which information should be stored. Entity types are usually recognizable concepts, either concrete or abstract, such as persons, places, things, or events which have relevance to the modeled system. + +An entity is an instance of an entity type. + +* Examples of entity types in the context of ride-hailing and food delivery: `customer`, `order`, `driver`, `restaurant`, `dish`, `area`. +* A specific driver, for example a driver with ID `D011234` would be an entity of the entity type `driver` + +An entity is the object on which features are observed. For example we could have a feature `total_trips_24h` on the driver `D01123` with a feature value of `11`. + +In the context of Feast, entities are important because they are used as keys when looking up feature values. Entities are also used when joining feature values between different feature sets in order to build one large data set to train a model, or to serve a model. + +{% hint style="info" %} +Entities in Feast are defined within Feature Sets and are not treated as standalone concepts. +{% endhint %} + +### Types + +Feast supports the following types for feature values + +* Bytes +* String +* Int32 +* Int64 +* Double +* Float +* Bool +* Bytes List +* String List +* Int32 List +* Int64 List +* Double List +* Float List +* Bool List + +## Glossary + +| Term | Description | +| :--- | :--- | +| Feast deployment | A complete Feast system as it is deployed. 
Consists out of a single Feast Core deployment and one or more Feast Serving deployments. | +| Feast Core | The centralized service which acts as a registry and authority of features. Organizations should only deploy a single Feast Core instance. Feast Core also manages the ingestion of feature data and population of Feast Serving data stores. | +| Feast Serving | Feast Serving is a service used to access both online and batch feature data. Feast Serving deployments are backed by one or more databases. | -An entity determines how a feature may be retrieved. e.g. for a Driver entity all driver features must be looked up with an associated driver id entity key. diff --git a/docs/endusers.md b/docs/endusers.md deleted file mode 100644 index 7543739a998..00000000000 --- a/docs/endusers.md +++ /dev/null @@ -1,143 +0,0 @@ -# Feast End Users Quickstart Guide - -## Pre-requisities - -* A working Feast Core: Consult your Feast admin or [install your own](install.md). -* Feast CLI tools: Use [pre-built - binaries](https://github.com/gojek/feast/releases) or [compile your - own](../cli/README.md). - -Make sure your CLI is correctly configured for your Feast Core. If -you're running a local Feast Core, it would be: -```sh -feast config set coreURI localhost -``` - -## Introduction - -There are several stages to using Feast: -1. Register your feature -2. Ingest data for your feature -3. Query feature data for training your models -4. Query feature data for serving your models - -## Registering your feature - -In order to register a feature, you will first need to register a: -* Storage location (typically done by your Feast admin) -* Entity - -All registrations are done using [specs](specs.md). - -### Registering an entity - -Then register an entity, which is for grouping features under a unique -key or id. Typically these map to a domain object, e.g., a customer, a -merchant, a sales region. - -[`wordEntity.yml`](../examples/wordEntity.yml) -``` -name: word -description: word found in shakespearean works -``` - -Register the entity spec: -```sh -feast apply entity wordEntity.yml -``` - -### Registering your feature - -Next, define your feature: - -[`wordCountFeature.yml`](../examples/wordCountFeature.yml) -``` -id: word.count -name: count -entity: word -owner: bob@feast.com -description: number of times the word appears -valueType: INT64 -uri: https://github.com/bob/example -``` - -Register it: -```sh -feast apply feature wordCountFeature.yml -``` - -## Ingest data for your feature - -Feast supports ingesting feature from 4 type of sources: - -* File (either CSV or JSON) -* Bigquery Table -* Pubsub Topic -* Pubsub Subscription - -Let's take a look on how to create an import job spec and ingest some data from a CSV file. -You may find more information on how to ingest data from different sources -here: [[Import Specs]](specs.md#import-spec) - -### Prepare your data -`word_counts.csv` -```csv -count,word -28944,the -27317,and -21120,i -20136,to -17181,of -14945,a -13989,you -12949,my -11513,in -11488,that -9545,is -8855,not -8293,with -8043,me -8003,it -... 
-``` - -And then upload it into your Google Storage bucket: - -```sh -gsutil cp word_counts.csv gs://your-bucket -``` - -### Define the job import spec -`shakespeareWordCountsImport.yml` -```yaml -type: file.csv -sourceOptions: - path: gs://your-bucket/word_counts.csv -entities: - - word -schema: - entityIdColumn: word - timestampValue: 2019-01-01T00:00:00.000Z - fields: - - name: count - featureId: word.count - - name: word -``` - -### Start the ingestion job -Next, use `feast` CLI to run your ingestion job, defined in -`shakespeareWordCountsImport.yml`: -```sh -feast jobs run shakespeareWordCountsImport.yml -``` - -You can also list recent ingestion jobs by running: -```sh -feast list jobs -``` - -Or get detailed information about the results of ingestion with: -```sh -feast get job -``` - \ No newline at end of file diff --git a/docs/getting-started/install-feast.md b/docs/getting-started/install-feast.md new file mode 100644 index 00000000000..9efd20bd02d --- /dev/null +++ b/docs/getting-started/install-feast.md @@ -0,0 +1,374 @@ +# Install Feast + +## Overview + +This installation guide will demonstrate two ways of installing Feast: + +* [**Minikube \(Minimal\)**](install-feast.md#minikube)**:** This installation has no external dependencies, but does not have a historical feature store installed. It allows users to quickly get a feel for Feast. +* \*\*\*\*[**Google Kubernetes Engine \(Recommended\):**](install-feast.md#google-kubernetes-engine) ****The guide that follows is a single cluster Feast installation on Google's GKE. It has Google Cloud specific dependencies like BigQuery, Dataflow, and Google Cloud Storage. + +## Minikube + +### Overview + +This guide will install Feast into [Minikube](https://github.com/kubernetes/minikube). Once Feast is installed you will be able to: + +* Define and register features. +* Load feature data from both batch and streaming sources. +* Retrieve features for online serving. + +{% hint style="warning" %} +This Minikube installation guide is for demonstration purposes only. It is not meant for production use, and does not install a historical feature store. +{% endhint %} + +### 0. Requirements + +The following software should be installed prior to starting: + +1. [Minikube](https://kubernetes.io/docs/tasks/tools/install-minikube/) should be installed. +2. [Kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) installed and configured to work with Minikube. +3. [Helm](https://helm.sh/3) \(2.16.0 or greater\). + +### 1. Set up Minikube + +Start Minikube. Note the minimum cpu and memory below: + +```bash +minikube start --cpus=3 --memory=4096 --kubernetes-version='v1.15.5' +``` + +Set up your Feast environmental variables + +```bash +export FEAST_IP=$(minikube ip) +export FEAST_CORE_URL=${FEAST_IP}:32090 +export FEAST_SERVING_URL=${FEAST_IP}:32091 +``` + +### 2. Install Feast with Helm + +Clone the [Feast repository](https://github.com/gojek/feast/) and navigate to the `charts` sub-directory: + +```bash +git clone https://github.com/gojek/feast.git && \ +cd feast && export FEAST_HOME_DIR=$(pwd) && \ +cd infra/charts/feast +``` + +Copy the `values-demo.yaml` file for your installation: + +```bash +cp values-demo.yaml my-feast-values.yaml +``` + +Update all occurrences of the domain `feast.example.com` inside of `my-feast-values.yaml` with your Minikube IP. This is to allow external access to the services in the cluster. 
You can find your Minikube IP by running the following command `minikube ip`, or simply replace the text from the command line: + +```bash +sed -i "s/feast.example.com/${FEAST_IP}/g" my-feast-values.yaml +``` + +Install Tiller: + +```bash +helm init +``` + +Install the Feast Helm chart: + +```bash +helm install --name feast -f my-feast-values.yaml . +``` + +Ensure that the system comes online. This will take a few minutes + +```bash +watch kubectl get pods +``` + +```bash +NAME READY STATUS RESTARTS AGE +pod/feast-feast-core-666fd46db4-l58l6 1/1 Running 0 5m +pod/feast-feast-serving-online-84d99ddcbd 1/1 Running 0 6m +pod/feast-kafka-0 1/1 Running 0 3m +pod/feast-kafka-1 1/1 Running 0 4m +pod/feast-kafka-2 1/1 Running 0 4m +pod/feast-postgresql-0 1/1 Running 0 5m +pod/feast-redis-master-0 1/1 Running 0 5m +pod/feast-zookeeper-0 1/1 Running 0 5m +pod/feast-zookeeper-1 1/1 Running 0 5m +pod/feast-zookeeper-2 1/1 Running 0 5m +``` + +### 3. Connect to Feast with the Python SDK + +Install the Python SDK using pip: + +```bash +pip install -e ${FEAST_HOME_DIR}/sdk/python +``` + +Configure the Feast Python SDK: + +```bash +feast config set core_url ${FEAST_CORE_URL} +feast config set serving_url ${FEAST_SERVING_URL} +``` + +Make sure that both Feast Core and Feast Serving are connected: + +```bash +feast version +``` + +```javascript +{ + "sdk": { + "version": "feast 0.3.0" + }, + "core": { + "url": "192.168.99.100:32090", + "version": "0.3", + "status": "connected" + }, + "serving": { + "url": "192.168.99.100:32091", + "version": "0.3", + "status": "connected" + } +} +``` + +That's it! You can now start to use Feast! + +## Google Kubernetes Engine + +### Overview + +This guide will install Feast into a Kubernetes cluster on GCP. It assumes that all of your services will run within a single K8s cluster. Once Feast is installed you will be able to: + +* Define and register features. +* Load feature data from both batch and streaming sources. +* Retrieve features for model training. +* Retrieve features for online serving. + +{% hint style="info" %} +This guide requires [Google Cloud Platform](https://cloud.google.com/) for installation. + +* [BigQuery](https://cloud.google.com/bigquery/) is used for storing historical features. +* [Cloud Dataflow](https://cloud.google.com/dataflow/) is used for running data ingestion jobs. +* [Google Cloud Storage](https://cloud.google.com/storage/) is used for intermediate data storage. +{% endhint %} + +### 0. Requirements + +1. [Google Cloud SDK ](https://cloud.google.com/sdk/install)installed, authenticated, and configured to the project you will use. +2. [Kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) installed. +3. [Helm](https://helm.sh/3) \(2.16.0 or greater\) installed on your local machine with Tiller installed in your cluster. + +### 1. Set up GCP + +First define the environmental variables that we will use throughout this installation. Please customize these to reflect your environment. 
+ +```bash +export FEAST_GCP_PROJECT_ID=my-gcp-project +export FEAST_GCP_REGION=us-central1 +export FEAST_GCP_ZONE=us-central1-a +export FEAST_BIGQUERY_DATASET_ID=feast +export FEAST_GCS_BUCKET=${FEAST_GCP_PROJECT_ID}_feast_bucket +export FEAST_GKE_CLUSTER_NAME=feast +export FEAST_S_ACCOUNT_NAME=feast-sa +``` + +Create a Google Cloud Storage bucket for Feast to stage data during exports: + +```bash +gsutil mb gs://${FEAST_GCS_BUCKET} +``` + +Create a BigQuery dataset for storing historical features: + +```bash +bq mk ${FEAST_BIGQUERY_DATASET_ID} +``` + +Create the service account that Feast will run as: + +```bash +gcloud iam service-accounts create ${FEAST_S_ACCOUNT_NAME} + +gcloud projects add-iam-policy-binding ${FEAST_GCP_PROJECT_ID} \ + --member serviceAccount:${FEAST_S_ACCOUNT_NAME}@${FEAST_GCP_PROJECT_ID}.iam.gserviceaccount.com \ + --role roles/editor + +gcloud iam service-accounts keys create key.json --iam-account \ +${FEAST_S_ACCOUNT_NAME}@${FEAST_GCP_PROJECT_ID}.iam.gserviceaccount.com +``` + +Ensure that [Dataflow API is enabled](https://console.cloud.google.com/apis/api/dataflow.googleapis.com/overview): + +```bash +gcloud services enable dataflow.googleapis.com +``` + +### 2. Set up a Kubernetes \(GKE\) cluster + +{% hint style="warning" %} +Provisioning a GKE cluster can expose your services publicly. This guide does not cover securing access to the cluster. +{% endhint %} + +Create a GKE cluster: + +```bash +gcloud container clusters create ${FEAST_GKE_CLUSTER_NAME} \ + --machine-type n1-standard-4 +``` + +Create a secret in the GKE cluster based on your local key `key.json`: + +```bash +kubectl create secret generic feast-gcp-service-account --from-file=key.json +``` + +For this guide we will use `NodePort` for exposing Feast services. In order to do so, we must find an internal IP of at least one GKE node. + +```bash +export FEAST_IP=$(kubectl describe nodes | grep InternalIP | awk '{print $2}' | head -n 1) +export FEAST_CORE_URL=${FEAST_IP}:32090 +export FEAST_ONLINE_SERVING_URL=${FEAST_IP}:32091 +export FEAST_BATCH_SERVING_URL=${FEAST_IP}:32092 +``` + +Confirm that you are able to access this node: + +```bash +ping $FEAST_IP +``` + +```bash +PING 10.123.114.11 (10.123.114.11) 56(84) bytes of data. +64 bytes from 10.123.114.11: icmp_seq=1 ttl=63 time=54.2 ms +64 bytes from 10.123.114.11: icmp_seq=2 ttl=63 time=51.2 ms +``` + +### 3.
Set up Helm + +Run the following command to provide Tiller with authorization to install Feast: + +```bash +kubectl apply -f - < You can skip this step if you want to install Feast in an existing Kubernetes cluster -``` -gcloud container --project "${GCP_PROJECT}" clusters create "${FEAST_CLUSTER_NAME}" \ - --zone "${GCP_ZONE}" --no-enable-basic-auth --cluster-version "1.12.7-gke.10" \ - --machine-type "n1-standard-4" --image-type "COS" --disk-type "pd-standard" --disk-size "200" \ - --metadata disable-legacy-endpoints=true --scopes "https://www.googleapis.com/auth/devstorage.read_only","https://www.googleapis.com/auth/logging.write","https://www.googleapis.com/auth/monitoring","https://www.googleapis.com/auth/servicecontrol","https://www.googleapis.com/auth/service.management.readonly","https://www.googleapis.com/auth/trace.append" \ - --num-nodes "1" --enable-cloud-logging --enable-cloud-monitoring --no-enable-ip-alias \ - --network "projects/${GCP_PROJECT}/global/networks/${GCP_NETWORK}" \ - --subnetwork "projects/${GCP_PROJECT}/regions/${GCP_REGION}/subnetworks/${GCP_SUBNETWORK}" \ - --enable-autoscaling --min-nodes "1" --max-nodes "4" \ - --addons HorizontalPodAutoscaling,HttpLoadBalancing --enable-autoupgrade --enable-autorepair -``` - -Ensure that you are authenticated to manage the Kubernetes cluster -> You can skip this step if you are already authenticated to the cluster -```bash -gcloud container clusters get-credentials ${FEAST_CLUSTER_NAME} \ - --zone ${GCP_ZONE} --project ${GCP_PROJECT} -``` - -Run the following to create a virtual machine (VM) in Google Compute Engine (GCE) running redis server -> You can skip this step if you want to store Feast feature values for serving in an existing (non-clustered and non-authenticated) Redis instance. - -```bash -gcloud --project=${GCP_PROJECT} compute instances create ${FEAST_REDIS_GCE_INSTANCE_NAME} \ - --zone=${GCP_ZONE} \ - --boot-disk-size=200GB \ - --description="Redis instance for Feast serving store" \ - --machine-type=n1-highmem-2 \ - --network=${GCP_NETWORK} \ - --subnet=${GCP_SUBNETWORK} \ - --no-address \ - --metadata startup-script="apt-get -y install redis-server; echo 'bind 0.0.0.0' >> /etc/redis/redis.conf; systemctl enable redis; systemctl restart redis" -``` - -**NOTE** -Comment out `--no-address` option if you do not have a NAT router set up in your project (especially in newly created Google Cloud Project). This will assign a public IP to the redis instance so it can access the internet without a NAT router. With no public IP, the instance requires a NAT router to access internet. - -When the command above completes, it will output details of the newly created VM. Set the variable `FEAST_SERVING_REDIS_HOST` to the `INTERNAL_IP` value from the `gcloud` output. - -```bash -# Example output: -# NAME ZONE MACHINE_TYPE INTERNAL_IP STATUS -# feast-redis asia-east1-a n1-highmem-2 10.148.1.122 RUNNING - -FEAST_SERVING_REDIS_HOST=10.148.1.122 -``` - -#### Feast Service Account - -By default, clusters created in GKE have only basic permissions to read from Google Cloud Storage and write system logs. We need to create a new service account for Feast, which has permission to run Dataflow jobs, manage BigQuery and write to Google Cloud Storage. 
- -```bash -gcloud --project=${GCP_PROJECT} iam service-accounts create ${FEAST_SERVICE_ACCOUNT_NAME} \ - --display-name="Feast service account" - -for role in bigquery.dataEditor bigquery.jobUser storage.admin dataflow.admin; do - gcloud projects add-iam-policy-binding ${GCP_PROJECT} \ - --member serviceAccount:${FEAST_SERVICE_ACCOUNT_NAME}@${GCP_PROJECT}.iam.gserviceaccount.com \ - --role=roles/${role} | tail -n2 -done - -# Create a JSON service account key -gcloud iam service-accounts keys create /tmp/service-account.json \ - --iam-account ${FEAST_SERVICE_ACCOUNT_NAME}@${GCP_PROJECT}.iam.gserviceaccount.com - -# Create secret resources in the Kube cluster for the service account and Postgres password -kubectl create secret generic ${FEAST_HELM_RELEASE_NAME}-service-account \ - --from-file=/tmp/service-account.json - -kubectl create secret generic ${FEAST_HELM_RELEASE_NAME}-postgresql \ - --from-literal=postgresql-password=${FEAST_POSTGRES_PASSWORD} -``` - -#### Install Feast Helm Release - -> Make sure you have `Helm` command installed, if not follow this link -> https://helm.sh/docs/using_helm/#installing-helm - -Ensure you have installed Tiller (the server component of Helm) in your Kubernetes cluster. If not run the following commands: -```bash -# Create service account for Tiller and grant cluster admin permission -kubectl apply -f - < This is the **recommended** configuration for installing Feast in **production** where Core and Serving component of Feast are exposed **internally** via **LoadBalancer** service type. -> -> If you have not setup Virtual Private Connection (VPN) or you just want to test Feast, you may need to comment out the following fields below: -> - `core.service.annotations` -> - `core.service.loadBalancerSourceRanges` -> - `serving.service.annotations` -> - `serving.service.loadBalancerSourceRanges` - -**IMPORTANT NOTE** - -Commenting out those fields is **not** recommended, however, because it will **expose your Feast service to the public internet**. - -> A better approach if you don't have VPN setup to your Google Cloud network is to create a new VM that will act as a [bastion host](https://en.wikipedia.org/wiki/Bastion_host). You will then access Feast services after SSH-ing to this bastion host (because you can then access Feast services via the internal load balancer IP). 
- -```bash -cat < /tmp/feast-helm-values.yaml -global: - postgresql: - existingSecret: ${FEAST_HELM_RELEASE_NAME}-postgresql - -core: - projectId: ${GCP_PROJECT} - jobs: - runner: DataflowRunner - options: '{"project": "${GCP_PROJECT}","region": "${GCP_REGION}","subnetwork": "regions/${GCP_REGION}/subnetworks/${GCP_SUBNETWORK}", "maxNumWorkers": "${GCP_DATAFLOW_MAX_NUM_WORKERS}", "autoscalingAlgorithm": "THROUGHPUT_BASED"}' - service: - type: LoadBalancer - annotations: - cloud.google.com/load-balancer-type: Internal - loadBalancerSourceRanges: - - 10.0.0.0/8 - - 172.16.0.0/12 - - 192.168.0.0/16 - -serving: - service: - type: LoadBalancer - annotations: - cloud.google.com/load-balancer-type: Internal - loadBalancerSourceRanges: - - 10.0.0.0/8 - - 172.16.0.0/12 - - 192.168.0.0/16 - -dataflow: - projectID: ${GCP_PROJECT} - location: ${GCP_REGION} - -store: - serving: - options: '{"host": "${FEAST_SERVING_REDIS_HOST}", "port": 6379}' - warehouse: - options: '{"project": "${GCP_PROJECT}", "dataset": "${FEAST_WAREHOUSE_BIGQUERY_DATASET}"}' - -serviceAccount: - name: ${FEAST_HELM_RELEASE_NAME}-service-account - key: service-account.json -EOF -``` - -Install Feast using the values in `/tmp/feast-helm-values.yaml`. - -> Make sure you have cloned Feast repository and your **current directory is Feast repository root folder**. -> Otherwise, run the following: -> `git clone https://github.com/gojek/feast.git` -> `cd feast` - -Then run the following to install Feast Helm release: - -```bash -helm install --name ${FEAST_HELM_RELEASE_NAME} ./charts/feast -f /tmp/feast-helm-values.yaml -``` - -Wait for about 2 minutes :alarm_clock: then make sure Feast deployment is successful. -```bash -# Make sure all the pods have statuses "READY: 1/1" and "STATUS: RUNNING" -kubectl get pod - -# Retrieve the internal load balancer IP for feast-core and feast-serving services -kubectl get svc --selector release=${FEAST_HELM_RELEASE_NAME} -# NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE -# feast-core LoadBalancer 10.71.250.224 10.148.2.69 80:31652/TCP,6565:30148/TCP 2m30s -# feast-postgresql ClusterIP 10.71.249.72 5432/TCP 2m30s -# feast-postgresql-headless ClusterIP None 5432/TCP 2m30s -# feast-serving LoadBalancer 10.71.244.137 10.148.2.71 6565:30540/TCP,80:31324/TCP 2m30s - -# Set the following variables based on the "EXTERNAL-IP" values above -# These variables will be used by Feast CLI and Feast SDK later -FEAST_CORE_URI=10.148.2.69:6565 -FEAST_SERVING_URI=10.148.2.71:6565 -``` - -#### Test the Installation - -To ensure that Feast has been succesfully installed, we'll run the following tests: -- Create entity -- Create features for the entity -- Ingest feature values via batch import job -- Retrieve the values from Feast serving - -Make sure you have installed `feast` command and `feast` Python package. If not follow this [link](../cli/README.md#installation) to install Feast CLI and run `pip3 install -U feast` to install the Python package. 
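If your workstation cannot reach the internal load balancer IPs at all (for example, you skipped both the VPN and the bastion host setup), a quick way to run this smoke test is to port-forward the services to localhost instead. This is only a sketch; adjust the service names to whatever `kubectl get svc` printed for your release:

```bash
# Forward the gRPC ports of Feast Core and Feast Serving to localhost
# (run each command in its own terminal, or background them as shown)
kubectl port-forward svc/feast-core 6565:6565 &
kubectl port-forward svc/feast-serving 6566:6565 &

# Point the CLI and SDK at the forwarded ports instead of the load balancer IPs
FEAST_CORE_URI=localhost:6565
FEAST_SERVING_URI=localhost:6566
```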
- -```bash -# Configure feast CLI so it knows the URI for Feast Core -feast config set coreURI ${FEAST_CORE_URI} - -# Ensure your working directory is Feast repository root folder -feast apply entity integration-tests/testdata/entity_specs/entity_1.yaml -feast apply feature integration-tests/testdata/feature_specs/entity_1.feature_*.yaml - -# The batch import job needs to know the file path of the feature value file -# We'll upload it to Google Cloud Storage -gsutil cp integration-tests/testdata/feature_values/ingestion_1.csv \ - ${FEAST_STAGING_LOCATION_GCS_URI}/ingestion_1.csv - -# Prepare the import job specification file -cat < /tmp/feast-batch-import.yaml -type: file.csv - -sourceOptions: - path: ${FEAST_STAGING_LOCATION_GCS_URI}/ingestion_1.csv - -entities: -- entity_1 - -schema: - entityIdColumn: entity - timestampColumn: ts - fields: - - name: entity - - name: ts - - name: feature_1 - featureId: entity_1.feature_1 - - name: feature_2 - featureId: entity_1.feature_2 - - name: feature_3 - featureId: entity_1.feature_3 - - name: feature_4 - featureId: entity_1.feature_4 -EOF - -# Start the import job with feast cli -# This will start a Dataflow job that ingests the csv file containing the feature values -# into Feast warehouse (in BigQuery) and serving store (in Redis) -feast jobs run /tmp/feast-batch-import.yaml --wait - -# If the job fails, you can open Google Cloud Console and check the Dataflow dashboard -# for details - -# After the import job completed successfully, we can try retrieving the feature values -# from Feast serving using the Python SDK -python3 - <0" +# +# store.yaml: +# name: bigquery +# type: BIGQUERY +# bigquery_config: +# project_id: PROJECT_ID +# dataset_id: DATASET_ID +# subscriptions: +# - name: "*" +# version: ">0" + +# springConfigMountPath is the directory path where application.yaml and +# store.yaml will be mounted in the container. +springConfigMountPath: /etc/feast/feast-serving + +# gcpServiceAccount is the service account that Feast Core will use. +gcpServiceAccount: + # useExistingSecret specifies Feast to use an existing secret containing Google + # Cloud service account JSON key file. + useExistingSecret: false + existingSecret: + # name is the secret name of the existing secret for the service account. + name: feast-gcp-service-account + # key is the secret key of the existing secret for the service account. + # key is normally derived from the file name of the JSON key file. + key: key.json + # mountPath is the directory path where the JSON key file will be mounted. + # the value of "existingSecret.key" is file name of the service account file. + mountPath: /etc/gcloud/service-accounts + +# jvmOptions are options that will be passed to the Java Virtual Machine (JVM) +# running Feast Core. +# +# For example, it is good practice to set min and max heap size in JVM. +# https://stackoverflow.com/questions/6902135/side-effect-for-increasing-maxpermsize-and-max-heap-size +# +# Refer to https://docs.oracle.com/cd/E22289_01/html/821-1274/configuring-the-default-jvm-and-java-arguments.html +# to see other JVM options that can be set. 
+# +# jvmOptions: +# - -Xms768m +# - -Xmx768m + +livenessProbe: + enabled: false + initialDelaySeconds: 60 + periodSeconds: 10 + timeoutSeconds: 5 + successThreshold: 1 + failureThreshold: 5 + +readinessProbe: + enabled: false + initialDelaySeconds: 15 + periodSeconds: 10 + timeoutSeconds: 10 + successThreshold: 1 + failureThreshold: 5 + +service: + type: ClusterIP + http: + port: 80 + targetPort: 8080 + # nodePort is the port number that each cluster node will listen to + # https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport + # + # nodePort: + grpc: + port: 6566 + targetPort: 6566 + # nodePort is the port number that each cluster node will listen to + # https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport + # + # nodePort: + +ingress: + enabled: false + annotations: {} + # kubernetes.io/ingress.class: nginx + hosts: + # - host: chart-example.local + # port: http + +resources: {} + # We usually recommend not to specify default resources and to leave this as a conscious + # choice for the user. This also increases chances charts run on environments with little + # resources, such as Minikube. If you do want to specify resources, uncomment the following + # lines, adjust them as necessary, and remove the curly braces after 'resources:'. + # limits: + # cpu: 100m + # memory: 128Mi + # requests: + # cpu: 100m + # memory: 128Mi + +nodeSelector: {} + +tolerations: [] + +affinity: {} diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/.helmignore b/infra/charts/feast/charts/prometheus-statsd-exporter/.helmignore new file mode 100644 index 00000000000..c13e3c8fbb2 --- /dev/null +++ b/infra/charts/feast/charts/prometheus-statsd-exporter/.helmignore @@ -0,0 +1,21 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. +.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*~ +# Various IDEs +.project +.idea/ +*.tmproj \ No newline at end of file diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/Chart.yaml b/infra/charts/feast/charts/prometheus-statsd-exporter/Chart.yaml new file mode 100644 index 00000000000..98a9356dcd5 --- /dev/null +++ b/infra/charts/feast/charts/prometheus-statsd-exporter/Chart.yaml @@ -0,0 +1,12 @@ +apiVersion: v1 +appVersion: 0.8.0 +description: A Helm chart for prometheus statsd-exporter Scrape metrics stored statsd +home: https://github.com/prometheus/statsd_exporter +keywords: + - prometheus + - statsd +maintainers: + - name: enflo + email: toniflorithomar@gmail.com +name: prometheus-statsd-exporter +version: 0.1.2 \ No newline at end of file diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/README.md b/infra/charts/feast/charts/prometheus-statsd-exporter/README.md new file mode 100644 index 00000000000..69eb33039b8 --- /dev/null +++ b/infra/charts/feast/charts/prometheus-statsd-exporter/README.md @@ -0,0 +1,56 @@ +# Prometheus statsd-exporter + + ## TL;DR; + + ```console +$ helm install incubator/prometheus-statsd-exporter +``` + + ## Introduction + + This chart bootstraps a prometheus-statsd-exporter deployment on a [Kubernetes](http://kubernetes.io) cluster using the [Helm](https://helm.sh) package manager. 
+ + ## Installing the Chart + + To install the chart with the release name `my-release`: + + ```console +$ helm install incubator/prometheus-statsd-exporter --name my-release +``` + + + The command deploys prometheus-statsd-exporter on the Kubernetes cluster in the default configuration. The [configuration](#configuration) section lists the parameters that can be configured during installation. + + ## Uninstalling the Chart + + To uninstall/delete the `my-release` deployment: + + ```console +$ helm delete my-release +``` + + The command removes all the Kubernetes components associated with the chart and deletes the release. + + ## Configuration + + |Parameter | Description | Default | +|`extraArgs` | key:value list of extra arguments to give the binary | `{}` | +|`image.pullPolicy` | Image pull policy | `IfNotPresent` | +|`image.repository` | Image repository | `prom/statsd-exporter` | +|`image.tag` | Image tag | `v0.8.0` | +|`ingress.enabled` | enable ingress | `false` | +|`ingress.path` | ingress base path | `/` | +|`ingress.host` | Ingress accepted hostnames | `nil` | +|`ingress.tls` | Ingress TLS configuration | `[]` | +|`ingress.annotations` | Ingress annotations | `{}` | +|`service.type` | type of service | `ClusterIP` | +|`tolerations` | List of node taints to tolerate | `[]` | +|`resources` | pod resource requests & limits | `{}` | +| `persistence.enabled` | Create a volume to store data | true | + + Alternatively, a YAML file that specifies the values for the above parameters can be provided while installing the chart. For example, + + ```console +$ helm install incubator/prometheus-statsd-exporter --name my-release -f values.yaml +``` +> **Tip**: You can use the default [values.yaml](values.yaml) \ No newline at end of file diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/templates/NOTES.txt b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/NOTES.txt new file mode 100644 index 00000000000..bbd06f118ac --- /dev/null +++ b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/NOTES.txt @@ -0,0 +1,17 @@ + +To verify that prometheus-statsd-exporter has started, run: + +{{- if contains "NodePort" .Values.service.type }} + export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ template "prometheus-statsd-exporter.fullname" . }}) + export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}") + echo http://$NODE_IP:$NODE_PORT +{{- else if contains "LoadBalancer" .Values.service.type }} + NOTE: It may take a few minutes for the LoadBalancer IP to be available. + You can watch the status of by running 'kubectl get svc --namespace {{ .Release.Namespace }} -w {{ template "prometheus-statsd-exporter.fullname" . }}' + + export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ template "prometheus-statsd-exporter.fullname" . }} -o jsonpath='{.status.loadBalancer.ingress[0].ip}') + echo http://$SERVICE_IP:{{ .Values.service.servicePort }} +{{- else if contains "ClusterIP" .Values.service.type }} + export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app={{ template "prometheus-statsd-exporter.name" . 
}},component={{ .Chart.Name }}" -o jsonpath="{.items[0].metadata.name}") + kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 9090 +{{- end }} \ No newline at end of file diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/templates/_helpers.tpl b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/_helpers.tpl new file mode 100644 index 00000000000..1c5f01342b8 --- /dev/null +++ b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/_helpers.tpl @@ -0,0 +1,44 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Expand the name of the chart. +*/}} +{{- define "prometheus-statsd-exporter.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} +{{- end -}} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. +*/}} +{{- define "prometheus-statsd-exporter.fullname" -}} +{{- if .Values.fullnameOverride -}} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- $name := default .Chart.Name .Values.nameOverride -}} +{{- if contains $name .Release.Name -}} +{{- .Release.Name | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} +{{- end -}} +{{- end -}} +{{- end -}} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "prometheus-statsd-exporter.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} +{{- end -}} + + +{{/* +Create the name of the service account to use +*/}} +{{- define "prometheus-statsd-exporter.serviceAccountName" -}} +{{- if .Values.serviceAccount.enable -}} + {{ default (include "prometheus-statsd-expoter.fullname" .) .Values.serviceAccount.name }} +{{- else -}} + {{ default "default" .Values.serviceAccount.name }} +{{- end -}} +{{- end -}} \ No newline at end of file diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/templates/config.yaml b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/config.yaml new file mode 100644 index 00000000000..0f9de1e9538 --- /dev/null +++ b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/config.yaml @@ -0,0 +1,14 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ template "prometheus-statsd-exporter.fullname" . }}-config + labels: + app: {{ template "prometheus-statsd-exporter.name" . }} + chart: {{ .Chart.Name }}-{{ .Chart.Version }} + release: {{ .Release.Name }} + heritage: {{ .Release.Service }} +data: + statsd_mappings.yaml: | +# +# defaults: +# ttl: "45s" \ No newline at end of file diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/templates/deployment.yaml b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/deployment.yaml new file mode 100644 index 00000000000..47308ef89bd --- /dev/null +++ b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/deployment.yaml @@ -0,0 +1,80 @@ +apiVersion: extensions/v1beta1 +kind: Deployment +metadata: + name: {{ template "prometheus-statsd-exporter.fullname" . }} + labels: + app: {{ template "prometheus-statsd-exporter.name" . }} + chart: {{ .Chart.Name }}-{{ .Chart.Version }} + release: {{ .Release.Name }} + heritage: {{ .Release.Service }} +spec: + replicas: {{ .Values.statsdexporter.replicaCount }} + selector: + matchLabels: + app: {{ template "prometheus-statsd-exporter.name" . 
}} + release: {{ .Release.Name }} + template: + metadata: + annotations: +{{ toYaml .Values.statsdexporter.annotations | indent 8 }} + labels: + app: {{ template "prometheus-statsd-exporter.name" . }} + release: {{ .Release.Name }} + spec: + serviceAccountName: {{ template "prometheus-statsd-exporter.serviceAccountName" . }} + containers: + - name: {{ .Chart.Name }} + image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}" + imagePullPolicy: {{ .Values.image.pullPolicy }} + args: + - --statsd.mapping-config=/etc/statsd_conf/statsd_mappings.yaml + {{- range $key, $value := .Values.statsdexporter.extraArgs }} + - --{{ $key }}={{ $value }} + {{- end }} + volumeMounts: + - mountPath: /data + name: {{ .Values.persistentVolume.name }} + - name: statsd-config + mountPath: /etc/statsd_conf + env: + - name: HOME + value: /data + ports: + - name: metrics + containerPort: 9102 + protocol: TCP + - name: statsd-tcp + containerPort: 9125 + protocol: TCP + - name: statsd-udp + containerPort: 9125 + protocol: UDP + livenessProbe: + httpGet: + path: /#/status + port: 9102 + initialDelaySeconds: 10 + timeoutSeconds: 10 + readinessProbe: + httpGet: + path: /#/status + port: 9102 + initialDelaySeconds: 10 + timeoutSeconds: 10 + resources: +{{ toYaml .Values.statsdexporter.resources | indent 12 }} +{{- if .Values.statsdexporter.nodeSelector }} + nodeSelector: +{{ toYaml .Values.statsdexporter.nodeSelector | indent 8 }} + {{- end }} + volumes: + - name: statsd-config + configMap: + name: {{ template "prometheus-statsd-exporter.fullname" . }}-config + - name: {{ .Values.persistentVolume.name }} + {{- if .Values.persistentVolume.enabled }} + persistentVolumeClaim: + claimName: {{ if .Values.persistentVolume.claimName }}{{- else }}{{ template "prometheus-statsd-exporter.fullname" . }}{{- end }} + {{- else }} + emptyDir: {} + {{- end -}} diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/templates/pvc.yaml b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/pvc.yaml new file mode 100644 index 00000000000..e3149200595 --- /dev/null +++ b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/pvc.yaml @@ -0,0 +1,23 @@ +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + labels: + app: {{ template "prometheus-statsd-exporter.fullname" . }} + chart: {{ .Chart.Name }}-{{ .Chart.Version }} + component: "{{ .Chart.Name }}" + heritage: {{ .Release.Service }} + release: {{ .Release.Name }} + name: {{ template "prometheus-statsd-exporter.fullname" . }} +spec: + accessModes: +{{ toYaml .Values.persistentVolume.accessModes | indent 4 }} +{{- if .Values.persistentVolume.storageClass }} +{{- if (eq "-" .Values.persistentVolume.storageClass) }} + storageClassName: "" +{{- else }} + storageClassName: "{{ .Values.persistentVolume.storageClass }}" +{{- end }} +{{- end }} + resources: + requests: + storage: "{{ .Values.persistentVolume.size }}" \ No newline at end of file diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/templates/service.yaml b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/service.yaml new file mode 100644 index 00000000000..88d01b24a61 --- /dev/null +++ b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/service.yaml @@ -0,0 +1,51 @@ +apiVersion: v1 +kind: Service +metadata: +{{- if .Values.service.annotations }} + annotations: +{{ toYaml .Values.service.annotations | indent 4 }} +{{- end }} + labels: + app: {{ template "prometheus-statsd-exporter.fullname" . 
}} + chart: {{ .Chart.Name }}-{{ .Chart.Version }} + component: "{{ .Chart.Name }}" + heritage: {{ .Release.Service }} + release: {{ .Release.Name }} +{{- if .Values.service.labels }} +{{ toYaml .Values.service.labels | indent 4 }} +{{- end }} + name: {{ template "prometheus-statsd-exporter.fullname" . }} +spec: +{{- if .Values.service.clusterIP }} + clusterIP: {{ .Values.service.clusterIP }} +{{- end }} +{{- if .Values.service.externalIPs }} + externalIPs: +{{ toYaml .Values.service.externalIPs | indent 4 }} +{{- end }} +{{- if .Values.service.loadBalancerIP }} + loadBalancerIP: {{ .Values.service.loadBalancerIP }} +{{- end }} +{{- if .Values.service.loadBalancerSourceRanges }} + loadBalancerSourceRanges: + {{- range $cidr := .Values.service.loadBalancerSourceRanges }} + - {{ $cidr }} + {{- end }} +{{- end }} + ports: + - name: metrics + port: {{ .Values.service.metricsPort }} + protocol: TCP + targetPort: 9102 + - name: statsd-tcp + port: {{ .Values.service.statsdPort }} + protocol: TCP + targetPort: 9125 + - name: statsd-udp + port: {{ .Values.service.statsdPort }} + protocol: UDP + targetPort: 9125 + selector: + app: {{ template "prometheus-statsd-exporter.name" . }} + release: {{ .Release.Name }} + type: "{{ .Values.service.type }}" \ No newline at end of file diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/templates/serviceaccount.yaml b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/serviceaccount.yaml new file mode 100644 index 00000000000..8e807778359 --- /dev/null +++ b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/serviceaccount.yaml @@ -0,0 +1,12 @@ +{{- if .Values.serviceAccount.enable -}} +apiVersion: v1 +kind: ServiceAccount +metadata: + labels: + app: {{ template "prometheus-statsd-exporter.fullname" . }} + chart: {{ .Chart.Name }}-{{ .Chart.Version }} + component: "{{ .Values.serviceaccount.componentName }}" + heritage: {{ .Release.Service }} + release: {{ .Release.Name }} + name: {{ template "prometheus-statsd-exporter.fullname" . }} +{{- end -}} \ No newline at end of file diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/values.yaml b/infra/charts/feast/charts/prometheus-statsd-exporter/values.yaml new file mode 100644 index 00000000000..f2d523771ea --- /dev/null +++ b/infra/charts/feast/charts/prometheus-statsd-exporter/values.yaml @@ -0,0 +1,113 @@ +image: + repository: prom/statsd-exporter + tag: v0.12.1 + pullPolicy: IfNotPresent + +service: + annotations: {} + labels: {} + clusterIP: "" + ## List of IP addresses at which the alertmanager service is available + ## Ref: https://kubernetes.io/docs/user-guide/services/#external-ips + ## + externalIPs: [] + loadBalancerIP: "" + loadBalancerSourceRanges: [] + servicePort: 80 + type: ClusterIP + metricsPort: 9102 + statsdPort: 9125 + +statsdexporter: + podAnnotations: + + extraArgs: {} + # - --persistence.file=data-perst + + resources: {} + # We usually recommend not to specify default resources and to leave this as a conscious + # choice for the user. This also increases chances charts run on environments with little + # resources, such as Minikube. If you do want to specify resources, uncomment the following + # lines, adjust them as necessary, and remove the curly braces after 'resources:'. + + # limits: + # cpu: 500m + # memory: 1Gi + # requests: + # cpu: 500m + # memory: 512Mi + + ingress: + ## Enable Ingress. + ## + enabled: false + + ## Annotations. 
+ ## + # annotations: + # kubernetes.io/ingress.class: nginx + # kubernetes.io/tls-acme: 'true' + + ## Hostnames. + ## Must be provided if Ingress is enabled. + ## + # hosts: + # - prometheusstatsdexoirter.domain.com + + ## TLS configuration. + ## Secrets must be manually created in the namespace. + ## + # tls: + # - secretName: prometheusstatsdexoirter-tls + # hosts: + # - prometheusstatsdexoirter.domain.com + + tolerations: {} + # - effect: NoSchedule + # operator: Exists + + + replicaCount: 1 + + ## Affinity for pod assignment + ## Ref: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/#affinity-and-anti-affinity + affinity: {} + nodeSelector: {} + +serviceAccount: + ## If false, serviceaccount will not be installed + ## + enable: false + componentName: prometheus-statsd-exporter + +persistentVolume: + name: storage-volume + claimName: prometheus-statsd-exporter + + ## If true, statsd-export will create/use a Persistent Volume Claim + ## If false, use emptyDir + ## + enabled: true + + ## statsd-exporter data Persistent Volume access modes + ## Must match those of existing PV or dynamic provisioner + ## Ref: http://kubernetes.io/docs/user-guide/persistent-volumes/ + ## + accessModes: + - ReadWriteOnce + + ## statsd-exporter data Persistent Volume Claim annotations + ## + annotations: {} + + ## statsd-exporter data Persistent Volume existing claim name + ## Requires statsd-exporter.persistentVolume.enabled: true + ## If defined, PVC must be created manually before volume will be bound + existingClaim: "" + + ## statsd-exporter data Persistent Volume mount root path + ## + mountPath: /data + size: 20Gi + subPath: "" + storageClass: {} \ No newline at end of file diff --git a/infra/charts/feast/requirements.lock b/infra/charts/feast/requirements.lock new file mode 100644 index 00000000000..38305f47fdc --- /dev/null +++ b/infra/charts/feast/requirements.lock @@ -0,0 +1,12 @@ +dependencies: +- name: feast-core + repository: "" + version: 0.3.0 +- name: feast-serving + repository: "" + version: 0.3.0 +- name: feast-serving + repository: "" + version: 0.3.0 +digest: sha256:36d703bde61d079ca80738e820840f28d6dd8408b296942d5a3e4767d12378f7 +generated: "2019-11-18T11:26:02.622058+08:00" diff --git a/infra/charts/feast/requirements.yaml b/infra/charts/feast/requirements.yaml new file mode 100644 index 00000000000..e4bf9044e5f --- /dev/null +++ b/infra/charts/feast/requirements.yaml @@ -0,0 +1,12 @@ +dependencies: +- name: feast-core + version: 0.3.0 + condition: feast-core.enabled +- name: feast-serving + alias: feast-serving-batch + version: 0.3.0 + condition: feast-serving-batch.enabled +- name: feast-serving + alias: feast-serving-online + version: 0.3.0 + condition: feast-serving-online.enabled diff --git a/infra/charts/feast/values-demo.yaml b/infra/charts/feast/values-demo.yaml new file mode 100644 index 00000000000..9212070eb5d --- /dev/null +++ b/infra/charts/feast/values-demo.yaml @@ -0,0 +1,70 @@ +# The following are values for installing Feast for demonstration purpose: +# - Persistence is disabled since for demo purpose data is not expected +# to be durable +# - Only online serving (no batch serving) is installed to remove dependency +# on Google Cloud services. Batch serving requires BigQuery dependency. 
+# - Replace all occurrences of "feast.example.com" with the domain name or +# external IP pointing to your cluster +# + +feast-core: + enabled: true + + gcpServiceAccount: + useExistingSecret: false + + service: + type: NodePort + grpc: + nodePort: 32090 + + + resources: + requests: + cpu: 250m + memory: 256Mi + + postgresql: + persistence: + enabled: false + + + kafka: + enabled: true + persistence: + enabled: false + external: + enabled: true + type: NodePort + domain: feast.example.com + configurationOverrides: + "advertised.listeners": |- + EXTERNAL://feast.example.com:$((31090 + ${KAFKA_BROKER_ID})) + "listener.security.protocol.map": |- + PLAINTEXT:PLAINTEXT,EXTERNAL:PLAINTEXT + + application.yaml: + feast: + stream: + options: + bootstrapServers: feast.example.com:31090 + +feast-serving-online: + enabled: true + redis: + enabled: true + + service: + type: NodePort + grpc: + nodePort: 32091 + + store.yaml: + name: redis + type: REDIS + subscriptions: + - name: "*" + version: ">0" + +feast-serving-batch: + enabled: false diff --git a/infra/charts/feast/values-external-store.yaml b/infra/charts/feast/values-external-store.yaml new file mode 100644 index 00000000000..d012bcec56c --- /dev/null +++ b/infra/charts/feast/values-external-store.yaml @@ -0,0 +1,5 @@ +# TODO @dheryanto +# +# The following are sample values for installing Feast without setting up +# Kafka and Redis stores. In other words, using Feast with external stream +# source and stores. diff --git a/infra/charts/feast/values-production.yaml b/infra/charts/feast/values-production.yaml new file mode 100644 index 00000000000..6b53dc19ea8 --- /dev/null +++ b/infra/charts/feast/values-production.yaml @@ -0,0 +1,4 @@ +# TODO @dheryanto +# +# The following are sample values for installing Feast for typical production +# environment. diff --git a/infra/charts/feast/values.yaml b/infra/charts/feast/values.yaml new file mode 100644 index 00000000000..8a0e2ff0666 --- /dev/null +++ b/infra/charts/feast/values.yaml @@ -0,0 +1,100 @@ +# Feast deployment installs the following components: +# - Feast Core +# - Feast Serving Online +# - Feast Serving Batch +# +# The configuration for different components can be referenced from: +# - charts/feast-core/values.yaml +# - charts/feast-serving/values.yaml +# +# Note that "feast-serving-online" and "feast-serving-batch" are +# aliases to "feast-serving" chart since in typical scenario two instances +# of Feast Serving: online and batch will be deployed. Both described +# using the same chart "feast-serving". +# +# The following are default values for typical Feast deployment, but not +# for production setting. Refer to "values-production.yaml" for recommended +# values in production environment. +# +# Note that the import job by default uses DirectRunner +# https://beam.apache.org/documentation/runners/direct/ +# in this configuration since it allows Feast to run in more environments +# (unlike DataflowRunner which requires Google Cloud services). +# +# A secret containing Google Cloud service account JSON key is required +# in this configuration. 
+# https://cloud.google.com/iam/docs/creating-managing-service-accounts +# +# The Google Cloud service account must have the following roles: +# - bigquery.dataEditor +# - bigquery.jobUser +# +# Assuming a service account JSON key file has been downloaded to +# (please name the file key.json): +# /home/user/key.json +# +# Run the following command to create the secret in your Kubernetes cluster: +# +# kubectl create secret generic feast-gcp-service-account \ +# --from-file=/home/user/key.json +# + +feast-core: + enabled: true + jvmOptions: + - -Xms1024m + - -Xmx1024m + resources: + requests: + cpu: 1000m + memory: 1024Mi + gcpServiceAccount: + useExistingSecret: true + +feast-serving-online: + enabled: true + redis: + enabled: true + jvmOptions: + - -Xms1024m + - -Xmx1024m + resources: + requests: + cpu: 500m + memory: 1024Mi + store.yaml: + name: redis + type: REDIS + redis_config: + port: 6379 + subscriptions: + - name: "*" + version: ">0" + +feast-serving-batch: + enabled: true + redis: + enabled: false + jvmOptions: + - -Xms1024m + - -Xmx1024m + resources: + requests: + cpu: 500m + memory: 1024Mi + gcpServiceAccount: + useExistingSecret: true + application.yaml: + feast: + jobs: + staging-location: gs://bucket/path + store-type: REDIS + store.yaml: + name: bigquery + type: BIGQUERY + bigquery_config: + project_id: PROJECT_ID + dataset_id: DATASET_ID + subscriptions: + - name: "*" + version: ">0" diff --git a/infra/docker/core/Dockerfile b/infra/docker/core/Dockerfile new file mode 100644 index 00000000000..c4cfe34b71b --- /dev/null +++ b/infra/docker/core/Dockerfile @@ -0,0 +1,40 @@ +# ============================================================ +# Build stage 1: Builder +# ============================================================ + +FROM maven:3.6-jdk-8-slim as builder +ARG REVISION=dev +COPY . /build +WORKDIR /build +# +# Setting Maven repository .m2 directory relative to /build folder gives the +# user to optionally use cached repository when building the image by copying +# the existing .m2 directory to $FEAST_REPO_ROOT/.m2 +# +ENV MAVEN_OPTS="-Dmaven.repo.local=/build/.m2/repository -DdependencyLocationsEnabled=false" +RUN mvn --also-make --projects core,ingestion -Drevision=$REVISION \ + -DskipTests=true --batch-mode package +# +# Unpack the jar and copy the files into production Docker image +# for faster startup time when starting Dataflow jobs from Feast Core. +# This is because we need to stage the classes and dependencies when using Dataflow. +# The final size of the production image will be bigger but it seems +# a good tradeoff between speed and size. 
+# +# https://github.com/gojek/feast/pull/291 +RUN apt-get -qq update && apt-get -y install unar && \ + unar /build/core/target/feast-core-$REVISION.jar -o /build/core/target/ + +# ============================================================ +# Build stage 2: Production +# ============================================================ + +FROM openjdk:8-jre as production +ARG REVISION=dev +COPY --from=builder /build/core/target/feast-core-$REVISION.jar /opt/feast/feast-core.jar +COPY --from=builder /build/core/target/feast-core-$REVISION /opt/feast/feast-core +CMD ["java",\ + "-Xms2048m",\ + "-Xmx2048m",\ + "-jar",\ + "/opt/feast/feast-core.jar"] diff --git a/infra/docker/serving/Dockerfile b/infra/docker/serving/Dockerfile new file mode 100644 index 00000000000..3517183d782 --- /dev/null +++ b/infra/docker/serving/Dockerfile @@ -0,0 +1,36 @@ +# ============================================================ +# Build stage 1: Builder +# ============================================================ + +FROM maven:3.6-jdk-8-slim as builder +ARG REVISION=dev +COPY . /build +WORKDIR /build +# +# Setting Maven repository .m2 directory relative to /build folder gives the +# user to optionally use cached repository when building the image by copying +# the existing .m2 directory to $FEAST_REPO_ROOT/.m2 +# +ENV MAVEN_OPTS="-Dmaven.repo.local=/build/.m2/repository -DdependencyLocationsEnabled=false" +RUN mvn --also-make --projects serving -Drevision=$REVISION \ + -DskipTests=true --batch-mode package + +# ============================================================ +# Build stage 2: Production +# ============================================================ + +FROM openjdk:8-jre-alpine as production +ARG REVISION=dev +# +# Download grpc_health_probe to run health check for Feast Serving +# https://kubernetes.io/blog/2018/10/01/health-checking-grpc-servers-on-kubernetes/ +# +RUN wget -q https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/v0.3.1/grpc_health_probe-linux-amd64 \ + -O /usr/bin/grpc-health-probe && \ + chmod +x /usr/bin/grpc-health-probe +COPY --from=builder /build/serving/target/feast-serving-$REVISION.jar /opt/feast/feast-serving.jar +CMD ["java",\ + "-Xms1024m",\ + "-Xmx1024m",\ + "-jar",\ + "/opt/feast/feast-serving.jar"] diff --git a/ingestion/pom.xml b/ingestion/pom.xml index faf60b5a955..eb892335180 100644 --- a/ingestion/pom.xml +++ b/ingestion/pom.xml @@ -19,24 +19,15 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> 4.0.0 + feast feast-parent ${revision} - feast-ingestion - jar - Feast Ingestion - - UTF-8 - 2.9.0 - 1.35.0 - 1.27.0 - 1.13.1 - 4.1.0 - 2.2.2.RELEASE - + Feast Ingestion + feast-ingestion @@ -60,13 +51,37 @@ implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/> - feast.ingestion.ImportJob + feast.ingestion.ImportJobOld reference.conf + + + + org.springframework + org.springframework.vendor + + + io.grpc + io.grpc.vendor + + + io.opencensus + io.opencensus.vendor + + + feast.core + feast.core.vendor + + + com.google.cloud.bigquery + com.google.cloud.bigquery.vendor + + @@ -75,120 +90,93 @@ - - org.hibernate.validator - hibernate-validator - 6.0.13.Final - org.glassfish javax.el - 3.0.1-b09 + 3.0.0 - com.google.auto.service - auto-service - 1.0-rc4 + javax.validation + validation-api + 2.0.1.Final - - com.google.cloud - google-cloud-nio - 0.83.0-alpha + org.hibernate.validator + hibernate-validator + 6.0.13.Final - io.grpc - 
grpc-netty - ${grpcVersion} + com.google.auto.value + auto-value-annotations + 1.6.6 - io.grpc - grpc-stub - ${grpcVersion} + com.google.auto.value + auto-value + 1.6.6 + provided + io.grpc - grpc-protobuf - ${grpcVersion} + grpc-stub - com.google.cloud google-cloud-storage - ${com.google.cloud.version} com.google.cloud - google-cloud-core - ${com.google.cloud.version} + google-cloud-bigquery - com.google.cloud - google-cloud-bigquery - ${com.google.cloud.version} + org.hamcrest + hamcrest-core - - org.hamcrest hamcrest-library - 1.3 - test org.mockito - mockito-all - 1.10.19 - test + mockito-core - com.fasterxml.jackson.dataformat - jackson-dataformat-yaml - 2.9.6 - - - com.fasterxml.jackson.module - jackson-module-jsonSchema - 2.9.6 + com.fasterxml.jackson.core + jackson-annotations - - - com.google.inject - guice - ${guice.version} + com.fasterxml.jackson.core + jackson-core - com.google.inject.extensions - guice-multibindings - ${guice.version} + com.fasterxml.jackson.core + jackson-databind - - com.google.http-client - google-http-client - ${com.google.httpclient.version} + com.fasterxml.jackson.dataformat + jackson-dataformat-yaml - com.google.http-client - google-http-client-gson - ${com.google.httpclient.version} + com.fasterxml.jackson.module + jackson-module-jsonSchema - com.google.protobuf protobuf-java - ${protobufVersion} - + + com.google.protobuf + protobuf-java-util + junit @@ -198,31 +186,24 @@ - joda-time - joda-time - 2.9.9 + org.apache.kafka + kafka-clients + - com.google.cloud.bigtable - bigtable-hbase-beam - 1.8.0 - - - org.slf4j - slf4j-log4j12 - - + joda-time + joda-time org.apache.beam - beam-sdks-java-io-google-cloud-platform + beam-runners-google-cloud-dataflow-java ${org.apache.beam.version} org.apache.beam - beam-runners-google-cloud-dataflow-java + beam-runners-direct-java ${org.apache.beam.version} @@ -235,119 +216,44 @@ redis.clients jedis - 2.9.0 - - - com.google.apis - google-api-services-bigquery - v2-rev394-1.22.0 - - - - com.google.code.gson - gson - 2.8.5 - - - commons-codec - commons-codec - 1.11 - - - org.projectlombok - lombok - 1.16.22 - provided - - - org.apache.commons - commons-csv - 1.5 - - - - org.slf4j slf4j-api - 1.7.25 + + ch.qos.logback logback-classic 1.2.3 + runtime - - - com.fasterxml.jackson.dataformat - jackson-dataformat-csv - 2.9.6 - test - - - - - org.jtwig - jtwig-core - 5.87.0.RELEASE - - + com.github.kstyrc embedded-redis - 0.6 - test + - org.testcontainers - testcontainers - 1.9.1 + org.apache.kafka + kafka_2.12 test + com.google.guava guava - 26.0-jre - compile - - - org.apache.kafka - kafka-clients - 2.0.0 - - - org.springframework.boot - spring-boot-starter-test - 2.1.1.RELEASE - test - - - org.springframework.kafka - spring-kafka - ${spring.kafka.version} - test - org.springframework.kafka - spring-kafka-test - ${spring.kafka.version} - test - - - org.apache.beam - beam-runners-flink_2.11 - ${org.apache.beam.version} - - - org.apache.beam - beam-runners-direct-java - ${org.apache.beam.version} - runtime + com.datadoghq + java-dogstatsd-client + 2.8.1 + diff --git a/ingestion/src/main/java/feast/SerializableCache.java b/ingestion/src/main/java/feast/SerializableCache.java deleted file mode 100644 index d5c654bb50d..00000000000 --- a/ingestion/src/main/java/feast/SerializableCache.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast; - -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; -import java.io.Serializable; -import java.time.Duration; -import java.util.concurrent.ExecutionException; -import lombok.Builder; -import org.apache.beam.sdk.transforms.SerializableFunction; - -@Builder -public class SerializableCache implements Serializable { - private transient LoadingCache cache; - private SerializableFunction loadingFunction; - private Integer maximumSize; - private Duration expireAfterAccess; - - public VALUE get(KEY key) { - try { - return getCache().get(key); - } catch (ExecutionException e) { - throw new RuntimeException(e); - } - } - - private LoadingCache getCache() { - if (cache == null) { - CacheBuilder builder = CacheBuilder.newBuilder(); - if (maximumSize != null) { - builder = builder.maximumSize(maximumSize); - } - if (expireAfterAccess != null) { - builder = builder.expireAfterAccess(expireAfterAccess); - } - cache = - builder.build( - new CacheLoader() { - @Override - public VALUE load(KEY key) throws Exception { - return loadingFunction.apply(key); - } - }); - } - return cache; - } -} diff --git a/ingestion/src/main/java/feast/ingestion/ImportJob.java b/ingestion/src/main/java/feast/ingestion/ImportJob.java index 5a8dec382b6..d919a18ac5c 100644 --- a/ingestion/src/main/java/feast/ingestion/ImportJob.java +++ b/ingestion/src/main/java/feast/ingestion/ImportJob.java @@ -1,228 +1,152 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - package feast.ingestion; -import com.google.api.services.bigquery.model.TableRow; -import com.google.common.base.Strings; -import com.google.inject.Guice; -import com.google.inject.Inject; -import com.google.inject.Injector; import com.google.protobuf.InvalidProtocolBufferException; -import com.google.protobuf.util.JsonFormat; -import feast.ingestion.boot.ImportJobModule; -import feast.ingestion.boot.PipelineModule; -import feast.ingestion.config.ImportJobSpecsSupplier; -import feast.ingestion.metrics.FeastMetrics; -import feast.ingestion.model.Specs; -import feast.ingestion.options.ImportJobPipelineOptions; -import feast.ingestion.options.JobOptions; -import feast.ingestion.transform.CoalesceFeatureRowExtended; -import feast.ingestion.transform.ErrorsStoreTransform; -import feast.ingestion.transform.ReadFeaturesTransform; -import feast.ingestion.transform.ServingStoreTransform; -import feast.ingestion.transform.ToFeatureRowExtended; -import feast.ingestion.transform.ValidateTransform; -import feast.ingestion.transform.WarehouseStoreTransform; -import feast.ingestion.transform.fn.ConvertTypesDoFn; -import feast.ingestion.transform.fn.LoggerDoFn; -import feast.ingestion.values.PFeatureRows; -import feast.options.OptionsParser; -import feast.specs.ImportJobSpecsProto.ImportJobSpecs; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.SourceProto.Source; +import feast.core.StoreProto.Store; +import feast.ingestion.options.ImportOptions; +import feast.ingestion.transform.ReadFromSource; +import feast.ingestion.transform.ValidateFeatureRows; +import feast.ingestion.transform.WriteFailedElementToBigQuery; +import feast.ingestion.transform.WriteToStore; +import feast.ingestion.transform.metrics.WriteMetricsTransform; +import feast.ingestion.utils.ResourceUtil; +import feast.ingestion.utils.SpecUtil; +import feast.ingestion.utils.StoreUtil; +import feast.ingestion.values.FailedElement; import feast.types.FeatureRowProto.FeatureRow; -import java.util.Arrays; -import java.util.Random; -import lombok.extern.slf4j.Slf4j; -import org.apache.beam.runners.dataflow.DataflowPipelineJob; -import org.apache.beam.runners.dataflow.DataflowRunner; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.PipelineResult; -import org.apache.beam.sdk.PipelineRunner; -import org.apache.beam.sdk.coders.CoderRegistry; -import org.apache.beam.sdk.extensions.protobuf.ProtoCoder; -import org.apache.beam.sdk.io.gcp.bigquery.TableRowJsonCoder; import org.apache.beam.sdk.options.PipelineOptionsFactory; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.transforms.Sample; -import org.apache.beam.sdk.transforms.windowing.AfterWatermark; -import org.apache.beam.sdk.transforms.windowing.FixedWindows; -import org.apache.beam.sdk.transforms.windowing.Window; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PCollection.IsBounded; -import org.apache.beam.sdk.values.TypeDescriptor; -import org.apache.commons.codec.digest.DigestUtils; -import org.joda.time.DateTime; -import org.joda.time.Duration; -import org.slf4j.event.Level; - -@Slf4j -public class ImportJob { - - private static Random random = new Random(System.currentTimeMillis()); - - private final Pipeline pipeline; - private final ImportJobSpecs importJobSpecs; - private final ReadFeaturesTransform readFeaturesTransform; - private final 
ServingStoreTransform servingStoreTransform; - private final WarehouseStoreTransform warehouseStoreTransform; - private final ErrorsStoreTransform errorsStoreTransform; - private final boolean dryRun; - private final ImportJobPipelineOptions options; - private final Specs specs; - - @Inject - public ImportJob( - Pipeline pipeline, - ImportJobSpecs importJobSpecs, - ReadFeaturesTransform readFeaturesTransform, - ServingStoreTransform servingStoreTransform, - WarehouseStoreTransform warehouseStoreTransform, - ErrorsStoreTransform errorsStoreTransform, - ImportJobPipelineOptions options, - Specs specs) { - this.pipeline = pipeline; - this.importJobSpecs = importJobSpecs; - this.readFeaturesTransform = readFeaturesTransform; - this.servingStoreTransform = servingStoreTransform; - this.warehouseStoreTransform = warehouseStoreTransform; - this.errorsStoreTransform = errorsStoreTransform; - this.dryRun = options.isDryRun(); - this.options = options; - this.specs = specs; - } - - public static void main(String[] args) { - mainWithResult(args); - } +import org.apache.beam.sdk.options.PipelineOptionsValidator; +import org.apache.beam.sdk.values.PCollectionTuple; +import org.apache.beam.sdk.values.TupleTag; +import org.apache.commons.lang3.tuple.Pair; +import org.slf4j.Logger; - public static PipelineResult mainWithResult(String[] args) { - log.info("Arguments: " + Arrays.toString(args)); - ImportJobPipelineOptions options = - PipelineOptionsFactory.fromArgs(args).withValidation().as(ImportJobPipelineOptions.class); - if (options.getJobName().isEmpty()) { - options.setJobName(generateName()); - } - log.info("options: " + options.toString()); - ImportJobSpecs importJobSpecs = new ImportJobSpecsSupplier(options.getWorkspace()) - .get(); - Injector injector = - Guice.createInjector(new ImportJobModule(options, importJobSpecs), new PipelineModule()); - ImportJob job = injector.getInstance(ImportJob.class); - - job.expand(); - return job.run(); - } +public class ImportJob { - private static String generateName() { - byte[] bytes = new byte[7]; - random.nextBytes(bytes); - String randomHex = DigestUtils.sha1Hex(bytes).substring(0, 7); - return String.format("feast-importjob-%s-%s", DateTime.now().getMillis(), randomHex); + // Tag for main output containing Feature Row that has been successfully processed. + private static final TupleTag FEATURE_ROW_OUT = new TupleTag() { + }; + + // Tag for deadletter output containing elements and error messages from invalid input/transform. 
+ private static final TupleTag DEADLETTER_OUT = new TupleTag() { + }; + private static final Logger log = org.slf4j.LoggerFactory.getLogger(ImportJob.class); + + /** + * @param args arguments to be passed to Beam pipeline + * @throws InvalidProtocolBufferException if options passed to the pipeline are invalid + */ + public static void main(String[] args) throws InvalidProtocolBufferException { + ImportOptions options = + PipelineOptionsFactory.fromArgs(args).withValidation().create().as(ImportOptions.class); + runPipeline(options); } - public void expand() { - CoderRegistry coderRegistry = pipeline.getCoderRegistry(); - coderRegistry.registerCoderForType( - TypeDescriptor.of(FeatureRow.class), ProtoCoder.of(FeatureRow.class)); - coderRegistry.registerCoderForType( - TypeDescriptor.of(FeatureRowExtended.class), ProtoCoder.of(FeatureRowExtended.class)); - coderRegistry.registerCoderForType(TypeDescriptor.of(TableRow.class), TableRowJsonCoder.of()); - - JobOptions jobOptions = OptionsParser - .parse(importJobSpecs.getImportSpec().getJobOptionsMap(), JobOptions.class); - - try { - log.info(JsonFormat.printer().print(importJobSpecs)); - } catch (InvalidProtocolBufferException e) { - // pass - } - specs.validate(); - - PCollection features = pipeline.apply("Read", readFeaturesTransform); - if (jobOptions.getSampleLimit() > 0) { - features = features.apply(Sample.any(jobOptions.getSampleLimit())); - } - - PCollection featuresExtended = - features.apply("Wrap with attempt data", new ToFeatureRowExtended()); - - PFeatureRows pFeatureRows = PFeatureRows.of(featuresExtended); - pFeatureRows = pFeatureRows.applyDoFn("Convert feature types", new ConvertTypesDoFn(specs)); - pFeatureRows = pFeatureRows.apply("Validate features", new ValidateTransform(specs)); - - log.info( - "A sample of size 1 of incoming rows from MAIN and ERRORS will logged every 30 seconds for visibility"); - logNRows(pFeatureRows, "Output sample", 1, Duration.standardSeconds(30)); - - PCollection warehouseRows = pFeatureRows.getMain(); - PCollection servingRows = pFeatureRows.getMain(); - PCollection errorRows = pFeatureRows.getErrors(); - if (jobOptions.isCoalesceRowsEnabled()) { - // Should we merge and dedupe rows before writing to the serving store? - servingRows = servingRows.apply("Coalesce Rows", new CoalesceFeatureRowExtended( - jobOptions.getCoalesceRowsDelaySeconds(), - jobOptions.getCoalesceRowsTimeoutSeconds())); - } - - if (!dryRun) { - servingRows.apply("Write to Serving Stores", servingStoreTransform); - if (!Strings.isNullOrEmpty(importJobSpecs.getWarehouseStorageSpec().getId())) { - warehouseRows.apply("Write to Warehouse Stores", warehouseStoreTransform); + @SuppressWarnings("UnusedReturnValue") + public static PipelineResult runPipeline(ImportOptions options) + throws InvalidProtocolBufferException { + /* + * Steps: + * 1. Read messages from Feast Source as FeatureRow + * 2. Validate the feature rows to ensure the schema matches what is registered to the system + * 3. Write FeatureRow to the corresponding Store + * 4. Write elements that failed to be processed to a dead letter queue. + * 5. 
Write metrics to a metrics sink + */ + + PipelineOptionsValidator.validate(ImportOptions.class, options); + Pipeline pipeline = Pipeline.create(options); + + log.info("Starting import job with settings: \n{}", options.toString()); + + List featureSetSpecs = + SpecUtil.parseFeatureSetSpecJsonList(options.getFeatureSetSpecJson()); + List stores = SpecUtil.parseStoreJsonList(options.getStoreJson()); + + for (Store store : stores) { + List subscribedFeatureSets = + SpecUtil.getSubscribedFeatureSets(store.getSubscriptionsList(), featureSetSpecs); + + // Generate tags by key + Map> featureSetTagsByKey = subscribedFeatureSets.stream() + .map(fs -> { + String id = String.format("%s:%s", fs.getName(), fs.getVersion()); + return Pair.of(id, new TupleTag(id) { + }); + }) + .collect(Collectors.toMap(Pair::getLeft, Pair::getRight)); + + // TODO: make the source part of the job initialisation options + Source source = subscribedFeatureSets.get(0).getSource(); + + // Step 1. Read messages from Feast Source as FeatureRow. + PCollectionTuple convertedFeatureRows = + pipeline.apply( + "ReadFeatureRowFromSource", + ReadFromSource.newBuilder() + .setSource(source) + .setFeatureSetTagByKey(featureSetTagsByKey) + .setFailureTag(DEADLETTER_OUT) + .build()); + + for (FeatureSetSpec featureSet : subscribedFeatureSets) { + // Ensure Store has valid configuration and Feast can access it. + StoreUtil.setupStore(store, featureSet); + String id = String.format("%s:%s", featureSet.getName(), featureSet.getVersion()); + + // Step 2. Validate incoming FeatureRows + PCollectionTuple validatedRows = convertedFeatureRows + .get(featureSetTagsByKey.get(id)) + .apply(ValidateFeatureRows.newBuilder() + .setFeatureSetSpec(featureSet) + .setSuccessTag(FEATURE_ROW_OUT) + .setFailureTag(DEADLETTER_OUT) + .build()); + + // Step 3. Write FeatureRow to the corresponding Store. + validatedRows + .get(FEATURE_ROW_OUT) + .apply( + "WriteFeatureRowToStore", + WriteToStore.newBuilder().setFeatureSetSpec(featureSet).setStore(store).build()); + + // Step 4. Write FailedElements to a dead letter table in BigQuery. + if (options.getDeadLetterTableSpec() != null) { + convertedFeatureRows + .get(DEADLETTER_OUT) + .apply( + "WriteFailedElements_ReadFromSource", + WriteFailedElementToBigQuery.newBuilder() + .setJsonSchema(ResourceUtil.getDeadletterTableSchemaJson()) + .setTableSpec(options.getDeadLetterTableSpec()) + .build()); + + validatedRows + .get(DEADLETTER_OUT) + .apply("WriteFailedElements_ValidateRows", + WriteFailedElementToBigQuery.newBuilder() + .setJsonSchema(ResourceUtil.getDeadletterTableSchemaJson()) + .setTableSpec(options.getDeadLetterTableSpec()) + .build()); + } + + // Step 5. Write metrics to a metrics sink. 
+ validatedRows + .apply("WriteMetrics", WriteMetricsTransform.newBuilder() + .setFeatureSetSpec(featureSet) + .setStoreName(store.getName()) + .setSuccessTag(FEATURE_ROW_OUT) + .setFailureTag(DEADLETTER_OUT) + .build()); } - errorRows.apply(errorsStoreTransform); - } - } - - public PipelineResult run() { - PipelineResult result = pipeline.run(); - log.info(String.format("FeastImportJobId:%s", this.retrieveId(result))); - return result; - } - - public void logNRows(PFeatureRows pFeatureRows, String name, long limit, Duration period) { - PCollection main = pFeatureRows.getMain(); - PCollection errors = pFeatureRows.getErrors(); - - if (main.isBounded().equals(IsBounded.UNBOUNDED)) { - Window minuteWindow = - Window.into(FixedWindows.of(period)) - .triggering(AfterWatermark.pastEndOfWindow()) - .discardingFiredPanes() - .withAllowedLateness(Duration.ZERO); - main = main.apply(minuteWindow); - errors = errors.apply(minuteWindow); } - main.apply("metrics.store.lag", ParDo.of(FeastMetrics.lagUpdateDoFn())); - - main.apply("Sample success", Sample.any(limit)) - .apply("Log success sample", ParDo.of(new LoggerDoFn(Level.INFO, name + " MAIN "))); - errors - .apply("Sample errors", Sample.any(limit)) - .apply("Log errors sample", ParDo.of(new LoggerDoFn(Level.ERROR, name + " ERRORS "))); - } - - private String retrieveId(PipelineResult result) { - Class> runner = options.getRunner(); - if (runner.isAssignableFrom(DataflowRunner.class)) { - return ((DataflowPipelineJob) result).getJobId(); - } else { - return this.options.getJobName(); - } + return pipeline.run(); } } diff --git a/ingestion/src/main/java/feast/ingestion/boot/ImportJobModule.java b/ingestion/src/main/java/feast/ingestion/boot/ImportJobModule.java deleted file mode 100644 index 0d4b96519aa..00000000000 --- a/ingestion/src/main/java/feast/ingestion/boot/ImportJobModule.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.ingestion.boot; - -import com.google.inject.AbstractModule; -import com.google.inject.Provides; -import com.google.inject.Singleton; -import feast.ingestion.model.Specs; -import feast.ingestion.options.ImportJobPipelineOptions; -import feast.specs.ImportJobSpecsProto.ImportJobSpecs; -import feast.store.errors.FeatureErrorsFactory; -import feast.store.errors.FeatureErrorsFactoryService; -import feast.store.serving.FeatureServingFactory; -import feast.store.serving.FeatureServingFactoryService; -import feast.store.warehouse.FeatureWarehouseFactory; -import feast.store.warehouse.FeatureWarehouseFactoryService; -import java.util.List; -import org.apache.beam.sdk.options.PipelineOptions; - -/** - * An ImportJobModule is a Guice module for creating dependency injection bindings. 
- */ -public class ImportJobModule extends AbstractModule { - - private final ImportJobPipelineOptions options; - private ImportJobSpecs importJobSpecs; - - public ImportJobModule(ImportJobPipelineOptions options, ImportJobSpecs importJobSpecs) { - this.options = options; - this.importJobSpecs = importJobSpecs; - } - - @Override - protected void configure() { - bind(ImportJobPipelineOptions.class).toInstance(options); - bind(PipelineOptions.class).toInstance(options); - bind(ImportJobSpecs.class).toInstance(importJobSpecs); - } - - @Provides - @Singleton - Specs provideSpecs() { - return Specs.of(options.getJobName(), importJobSpecs); - } - - @Provides - @Singleton - List provideWarehouseStores() { - return FeatureWarehouseFactoryService.getAll(); - } - - @Provides - @Singleton - List provideServingStores() { - return FeatureServingFactoryService.getAll(); - } - - @Provides - @Singleton - List provideErrorsStores() { - return FeatureErrorsFactoryService.getAll(); - } -} diff --git a/ingestion/src/main/java/feast/ingestion/boot/PipelineModule.java b/ingestion/src/main/java/feast/ingestion/boot/PipelineModule.java deleted file mode 100644 index 0a3682e1c0b..00000000000 --- a/ingestion/src/main/java/feast/ingestion/boot/PipelineModule.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.ingestion.boot; - -import com.google.inject.AbstractModule; -import com.google.inject.Provides; -import com.google.inject.Singleton; -import org.apache.beam.sdk.Pipeline; -import org.apache.beam.sdk.options.PipelineOptions; - -public class PipelineModule extends AbstractModule { - - @Provides - @Singleton - Pipeline providePipeline(PipelineOptions options) { - return Pipeline.create(options); - } - - @Override - protected void configure() {} -} diff --git a/ingestion/src/main/java/feast/ingestion/coders/FailsafeFeatureRowCoder.java b/ingestion/src/main/java/feast/ingestion/coders/FailsafeFeatureRowCoder.java new file mode 100644 index 00000000000..6c0e9a30556 --- /dev/null +++ b/ingestion/src/main/java/feast/ingestion/coders/FailsafeFeatureRowCoder.java @@ -0,0 +1,93 @@ +package feast.ingestion.coders; + +import feast.ingestion.values.FailsafeFeatureRow; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Arrays; +import java.util.List; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.CoderException; +import org.apache.beam.sdk.coders.CustomCoder; +import org.apache.beam.sdk.coders.NullableCoder; +import org.apache.beam.sdk.coders.StringUtf8Coder; +import org.apache.beam.sdk.values.TypeDescriptor; +import org.apache.beam.sdk.values.TypeParameter; + +/** + * Adapted from: + * https://github.com/GoogleCloudPlatform/DataflowTemplates/blob/master/src/main/java/com/google/cloud/teleport/coders/FailsafeElementCoder.java + * + *

The {@link FailsafeFeatureRowCoder} encodes and decodes {@link FailsafeFeatureRow} objects. + * + *
This coder is necessary until Avro supports parameterized types (AVRO-1571) without requiring to + * explicitly specifying the schema for the type. + * + * @param The type of the original payload to be encoded. + * @param The type of the current payload to be encoded. + */ +public class FailsafeFeatureRowCoder + extends CustomCoder> { + + private static final NullableCoder STRING_CODER = NullableCoder.of(StringUtf8Coder.of()); + private final Coder originalPayloadCoder; + private final Coder currentPayloadCoder; + + private FailsafeFeatureRowCoder( + Coder originalPayloadCoder, Coder currentPayloadCoder) { + this.originalPayloadCoder = originalPayloadCoder; + this.currentPayloadCoder = currentPayloadCoder; + } + + public Coder getOriginalPayloadCoder() { + return originalPayloadCoder; + } + + public Coder getCurrentPayloadCoder() { + return currentPayloadCoder; + } + + public static FailsafeFeatureRowCoder of( + Coder originalPayloadCoder, Coder currentPayloadCoder) { + return new FailsafeFeatureRowCoder<>(originalPayloadCoder, currentPayloadCoder); + } + + @Override + public void encode(FailsafeFeatureRow value, OutputStream outStream) + throws IOException { + if (value == null) { + throw new CoderException("The FailsafeFeatureRowCoder cannot encode a null object!"); + } + + originalPayloadCoder.encode(value.getOriginalPayload(), outStream); + currentPayloadCoder.encode(value.getPayload(), outStream); + STRING_CODER.encode(value.getErrorMessage(), outStream); + STRING_CODER.encode(value.getStacktrace(), outStream); + } + + @Override + public FailsafeFeatureRow decode(InputStream inStream) throws IOException { + + OriginalT originalPayload = originalPayloadCoder.decode(inStream); + CurrentT currentPayload = currentPayloadCoder.decode(inStream); + String errorMessage = STRING_CODER.decode(inStream); + String stacktrace = STRING_CODER.decode(inStream); + + return FailsafeFeatureRow.of(originalPayload, currentPayload) + .setErrorMessage(errorMessage) + .setStacktrace(stacktrace); + } + + @Override + public List> getCoderArguments() { + return Arrays.asList(originalPayloadCoder, currentPayloadCoder); + } + + @Override + public TypeDescriptor> getEncodedTypeDescriptor() { + return new TypeDescriptor>() {}.where( + new TypeParameter() {}, originalPayloadCoder.getEncodedTypeDescriptor()) + .where(new TypeParameter() {}, currentPayloadCoder.getEncodedTypeDescriptor()); + } +} diff --git a/ingestion/src/main/java/feast/ingestion/config/ImportJobSpecsSupplier.java b/ingestion/src/main/java/feast/ingestion/config/ImportJobSpecsSupplier.java deleted file mode 100644 index 030ca816ec0..00000000000 --- a/ingestion/src/main/java/feast/ingestion/config/ImportJobSpecsSupplier.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
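A minimal usage sketch for the coder above, assuming the original payload is carried as a UTF-8 string and the current payload is the FeatureRow proto; StringUtf8Coder and ProtoCoder are the stock Beam coders for those two types, and the helper class is illustrative only.

import feast.ingestion.coders.FailsafeFeatureRowCoder;
import feast.ingestion.values.FailsafeFeatureRow;
import feast.types.FeatureRowProto.FeatureRow;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.extensions.protobuf.ProtoCoder;
import org.apache.beam.sdk.values.PCollection;

class FailsafeCoderSketch {
  // Attach the coder explicitly so Beam can serialize FailsafeFeatureRow elements between
  // transforms without needing a schema for the parameterized type.
  static PCollection<FailsafeFeatureRow<String, FeatureRow>> withCoder(
      PCollection<FailsafeFeatureRow<String, FeatureRow>> failsafeRows) {
    return failsafeRows.setCoder(
        FailsafeFeatureRowCoder.of(StringUtf8Coder.of(), ProtoCoder.of(FeatureRow.class)));
  }
}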
- * - */ - -package feast.ingestion.config; - -import com.google.common.base.Strings; -import feast.ingestion.util.PathUtil; -import feast.ingestion.util.ProtoUtil; -import feast.specs.ImportJobSpecsProto.ImportJobSpecs; -import java.io.IOException; -import java.nio.file.Path; -import java.util.function.Supplier; - -public class ImportJobSpecsSupplier implements Supplier { - - public static final String IMPORT_JOB_SPECS_FILENAME = "importJobSpecs.yaml"; - - private Path importJobSpecsPath; - private ImportJobSpecs importJobSpecs; - - public ImportJobSpecsSupplier(String workspace) { - if (!Strings.isNullOrEmpty(workspace)) { - this.importJobSpecsPath = PathUtil.getPath(workspace).resolve(IMPORT_JOB_SPECS_FILENAME); - } - } - - private ImportJobSpecs create() { - try { - if (importJobSpecsPath != null) { - return ProtoUtil - .decodeProtoYamlFile(importJobSpecsPath, ImportJobSpecs.getDefaultInstance()); - } else { - return ImportJobSpecs.getDefaultInstance(); - } - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - @Override - public ImportJobSpecs get() { - if (importJobSpecs == null) { - importJobSpecs = create(); - } - return importJobSpecs; - } -} diff --git a/ingestion/src/main/java/feast/ingestion/exceptions/ErrorsHandler.java b/ingestion/src/main/java/feast/ingestion/exceptions/ErrorsHandler.java deleted file mode 100644 index 917acd94d20..00000000000 --- a/ingestion/src/main/java/feast/ingestion/exceptions/ErrorsHandler.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.ingestion.exceptions; - -import org.apache.beam.sdk.transforms.DoFn.ProcessContext; -import feast.ingestion.values.PFeatureRows; -import feast.types.FeatureRowExtendedProto.Attempt; -import feast.types.FeatureRowExtendedProto.Error; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; - -public class ErrorsHandler { - - private ErrorsHandler() {} - - /** Resets the attempt count if that error has changed. 
*/ - public static int checkAttemptCount(int numAttempts, Error lastError, Error thisError) { - if (lastError != null) { - if (!lastError.getCause().equals(thisError.getCause()) - || !lastError.getTransform().equals(thisError.getTransform())) { - return 0; - } - } - return numAttempts; - } - - public static void handleError( - ProcessContext context, FeatureRowExtended rowExtended, Error thisError) { - Attempt lastAttempt = rowExtended.getLastAttempt(); - Error lastError = lastAttempt.getError(); - - int numAttempts = checkAttemptCount(lastAttempt.getAttempts(), lastError, thisError); - - Attempt thisAttempt = - Attempt.newBuilder().setAttempts(numAttempts + 1).setError(thisError).build(); - - FeatureRowExtended outputRowExtended = - FeatureRowExtended.newBuilder().mergeFrom(rowExtended).setLastAttempt(thisAttempt).build(); - - context.output(PFeatureRows.ERRORS_TAG, outputRowExtended); - } -} diff --git a/ingestion/src/main/java/feast/ingestion/exceptions/SpecNotFound.java b/ingestion/src/main/java/feast/ingestion/exceptions/SpecNotFound.java deleted file mode 100644 index 713a6dca71f..00000000000 --- a/ingestion/src/main/java/feast/ingestion/exceptions/SpecNotFound.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.ingestion.exceptions; - -public class SpecNotFound extends RuntimeException { - - public SpecNotFound(String message) { - super(message); - } - - public SpecNotFound(String message, Throwable throwable) { - super(message, throwable); - } -} diff --git a/ingestion/src/main/java/feast/ingestion/exceptions/TypeConversionException.java b/ingestion/src/main/java/feast/ingestion/exceptions/TypeConversionException.java deleted file mode 100644 index f1d2872b6d8..00000000000 --- a/ingestion/src/main/java/feast/ingestion/exceptions/TypeConversionException.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.exceptions; - -public class TypeConversionException extends RuntimeException { - - public TypeConversionException() { - super(); - } - - public TypeConversionException(String message, Throwable throwable) { - super(message, throwable); - } -} diff --git a/ingestion/src/main/java/feast/ingestion/exceptions/ValidationException.java b/ingestion/src/main/java/feast/ingestion/exceptions/ValidationException.java deleted file mode 100644 index 7c87eb39dcd..00000000000 --- a/ingestion/src/main/java/feast/ingestion/exceptions/ValidationException.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.ingestion.exceptions; - -public class ValidationException extends RuntimeException { - - public ValidationException(String message, Throwable throwable) { - super(message, throwable); - } -} diff --git a/ingestion/src/main/java/feast/ingestion/metrics/FeastMetrics.java b/ingestion/src/main/java/feast/ingestion/metrics/FeastMetrics.java deleted file mode 100644 index d42be02fe38..00000000000 --- a/ingestion/src/main/java/feast/ingestion/metrics/FeastMetrics.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.metrics; - -import com.google.protobuf.util.Timestamps; -import feast.types.FeatureProto.Feature; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import feast.types.FeatureRowProto; -import feast.types.FeatureRowProto.FeatureRow; -import lombok.AllArgsConstructor; -import org.apache.beam.sdk.metrics.Metrics; -import org.apache.beam.sdk.transforms.DoFn; - -public class FeastMetrics { - - public static final String FEAST_NAMESPACE = "feast"; - - private FeastMetrics() {} - - private static void inc(String name) { - Metrics.counter(FeastMetrics.FEAST_NAMESPACE, name).inc(); - } - - public static void update(String name, long value) { - Metrics.distribution(FeastMetrics.FEAST_NAMESPACE, name).update(value); - } - - public static void inc(FeatureRow row, String suffix) { - inc("row:" + suffix); - inc(String.format("entity:%s:%s", row.getEntityName(), suffix)); - for (Feature feature : row.getFeaturesList()) { - inc(String.format("feature:%s:%s", feature.getId(), suffix)); - } - } - - public static IncrRowExtendedFunc incrDoFn(String suffix) { - return new IncrRowExtendedFunc(suffix); - } - - public static CalculateLagMetricFunc lagUpdateDoFn() { - return new CalculateLagMetricFunc(); - } - - @AllArgsConstructor - public static class IncrRowExtendedFunc extends DoFn { - - private String suffix; - - @ProcessElement - public void processElement( - @Element FeatureRowExtended element, OutputReceiver out) { - inc(element.getRow(), suffix); - out.output(element); - } - } - - @AllArgsConstructor - public static class CalculateLagMetricFunc extends DoFn { - - @ProcessElement - public void processElement( - @Element FeatureRowExtended element, OutputReceiver out) { - FeatureRowProto.FeatureRow row = element.getRow(); - com.google.protobuf.Timestamp eventTimestamp = row.getEventTimestamp(); - - com.google.protobuf.Timestamp now = Timestamps.fromMillis(System.currentTimeMillis()); - long lagSeconds = now.getSeconds() - eventTimestamp.getSeconds(); - FeastMetrics.update("row:lag", lagSeconds); - out.output(element); - } - } -} diff --git a/ingestion/src/main/java/feast/ingestion/model/Errors.java b/ingestion/src/main/java/feast/ingestion/model/Errors.java deleted file mode 100644 index ac1e648c856..00000000000 --- a/ingestion/src/main/java/feast/ingestion/model/Errors.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.model; - -import java.io.ByteArrayOutputStream; -import java.io.PrintStream; -import java.io.UnsupportedEncodingException; -import org.apache.commons.codec.Charsets; -import feast.types.FeatureRowExtendedProto.Error; - -public class Errors { - public static Error toError(String transformName, String message, Throwable throwable) { - Error error = - Error.newBuilder() - .setTransform(transformName) - .setCause(throwable.getClass().getName()) - .setMessage(message) - .setStackTrace(getStackTraceString(throwable)) - .build(); - return error; - } - - public static Error toError(String transformName, Throwable throwable) { - return toError(transformName, throwable.getMessage(), throwable); - } - - public static String getStackTraceString(Throwable throwable) { - ByteArrayOutputStream byteouts = new ByteArrayOutputStream(); - throwable.printStackTrace(new PrintStream(byteouts)); - try { - return byteouts.toString(Charsets.UTF_8.name()); - } catch (UnsupportedEncodingException e) { - throw new RuntimeException(e); - } - } -} diff --git a/ingestion/src/main/java/feast/ingestion/model/Features.java b/ingestion/src/main/java/feast/ingestion/model/Features.java deleted file mode 100644 index f70c57dfcd4..00000000000 --- a/ingestion/src/main/java/feast/ingestion/model/Features.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.ingestion.model; - -import feast.types.FeatureProto.Feature; -import feast.types.ValueProto.Value; - -public class Features { - private Features() {} - - public static Feature of(String id, Value value) { - return Feature.newBuilder().setId(id).setValue(value).build(); - } -} diff --git a/ingestion/src/main/java/feast/ingestion/model/Specs.java b/ingestion/src/main/java/feast/ingestion/model/Specs.java deleted file mode 100644 index c818bb61805..00000000000 --- a/ingestion/src/main/java/feast/ingestion/model/Specs.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.model; - -import static com.google.common.base.Predicates.not; - -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.ImportJobSpecsProto.ImportJobSpecs; -import feast.specs.ImportSpecProto.Field; -import feast.specs.ImportSpecProto.ImportSpec; -import feast.specs.StorageSpecProto.StorageSpec; -import java.io.Serializable; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import lombok.Getter; -import lombok.ToString; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -@ToString -public class Specs implements Serializable { - - @Getter - ImportSpec importSpec; - @Getter - Map entitySpecs; - @Getter - Map featureSpecs; - @Getter - StorageSpec servingStorageSpec; - @Getter - StorageSpec warehouseStorageSpec; - @Getter - StorageSpec errorsStoreSpec; - - @Getter - private String jobName; - - public Specs(String jobName, ImportJobSpecs importJobSpecs) { - this.jobName = jobName; - if (importJobSpecs != null) { - this.importSpec = importJobSpecs.getImportSpec(); - - this.entitySpecs = importJobSpecs.getEntitySpecsList().stream().collect(Collectors.toMap( - EntitySpec::getName, - entitySpec -> entitySpec - )); - this.featureSpecs = importJobSpecs.getFeatureSpecsList().stream().collect(Collectors.toMap( - FeatureSpec::getId, - featureSpec -> featureSpec - )); - - this.servingStorageSpec = importJobSpecs.getServingStorageSpec(); - this.warehouseStorageSpec = importJobSpecs.getWarehouseStorageSpec(); - this.errorsStoreSpec = importJobSpecs.getErrorsStorageSpec(); - } - } - - public static Specs of(String jobName, ImportJobSpecs importJobSpecs) { - return new Specs(jobName, filterToUtilized(importJobSpecs)); - } - - static ImportJobSpecs filterToUtilized(ImportJobSpecs importJobSpecs) { - ImportSpec importSpec = importJobSpecs.getImportSpec(); - List fields = importSpec.getSchema().getFieldsList(); - List featureIds = fields.stream().map(Field::getFeatureId) - .filter(not(Strings::isNullOrEmpty)) - .collect(Collectors.toList()); - List entityNames = importSpec.getEntitiesList(); - - List featureSpecs = - importJobSpecs.getFeatureSpecsList().stream() - .filter(featureSpec -> featureIds.contains(featureSpec.getId())) - .collect(Collectors.toList()); - List entitySpecs = - importJobSpecs.getEntitySpecsList().stream() - .filter(entitySpec -> entityNames.contains(entitySpec.getName())) - .collect(Collectors.toList()); - return importJobSpecs.toBuilder() - .clearFeatureSpecs().addAllFeatureSpecs(featureSpecs) - .clearEntitySpecs().addAllEntitySpecs(entitySpecs).build(); - } - - public void validate() { - for (String entityName : importSpec.getEntitiesList()) { - getEntitySpec(entityName); - } - for (Field field : importSpec.getSchema().getFieldsList()) { - if (!Strings.isNullOrEmpty(field.getFeatureId())) { - getFeatureSpec(field.getFeatureId()); - } - } - } - - public EntitySpec getEntitySpec(String entityName) { - Preconditions.checkArgument( - entitySpecs.containsKey(entityName), - String.format("Unknown entity %s, spec was not initialized", entityName)); - return entitySpecs.get(entityName); - } - - public FeatureSpec getFeatureSpec(String featureId) { - Preconditions.checkArgument( - featureSpecs.containsKey(featureId), - String.format("Unknown feature %s, spec was not initialized", featureId)); - return featureSpecs.get(featureId); - } -} diff --git 
a/ingestion/src/main/java/feast/ingestion/model/Values.java b/ingestion/src/main/java/feast/ingestion/model/Values.java deleted file mode 100644 index 0bf514aaab8..00000000000 --- a/ingestion/src/main/java/feast/ingestion/model/Values.java +++ /dev/null @@ -1,310 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.ingestion.model; - -import com.google.protobuf.ByteString; -import com.google.protobuf.Timestamp; -import feast.ingestion.exceptions.TypeConversionException; -import feast.ingestion.util.DateUtil; -import feast.types.ValueProto.Value; -import feast.types.ValueProto.Value.ValCase; -import feast.types.ValueProto.ValueType; -import feast.types.ValueProto.ValueType.Enum; -import java.util.Base64; -import java.util.HashMap; -import java.util.Map; -import lombok.AllArgsConstructor; -import org.apache.beam.sdk.transforms.SerializableFunction; -import org.joda.time.DateTime; - -public class Values { - - private static final Map> valCaseToObjectFuncMap = - new HashMap<>(); - private static final Map valueTypeToValCaseMap = new HashMap<>(); - private static final Map valCaseToValueTypeMap = new HashMap<>(); - - private static final Value TRUE_BOOL_VAL = Value.newBuilder().setBoolVal(true).build(); - private static final Value FALSE_BOOL_VAL = Value.newBuilder().setBoolVal(false).build(); - - static { - Wrapper[] wrappers = - new Wrapper[]{ - new Wrapper(Enum.BYTES, ValCase.BYTESVAL, Value::getBytesVal), - new Wrapper(Enum.STRING, ValCase.STRINGVAL, Value::getStringVal), - new Wrapper(Enum.INT32, ValCase.INT32VAL, Value::getInt32Val), - new Wrapper(Enum.INT64, ValCase.INT64VAL, Value::getInt64Val), - new Wrapper(Enum.DOUBLE, ValCase.DOUBLEVAL, Value::getDoubleVal), - new Wrapper(Enum.FLOAT, ValCase.FLOATVAL, Value::getFloatVal), - new Wrapper(Enum.BOOL, ValCase.BOOLVAL, Value::getBoolVal), - new Wrapper(Enum.TIMESTAMP, ValCase.TIMESTAMPVAL, Value::getTimestampVal) - }; - for (Wrapper wrapper : wrappers) { - valueTypeToValCaseMap.put(wrapper.valueType, wrapper.valCase); - valCaseToValueTypeMap.put(wrapper.valCase, wrapper.valueType); - valCaseToObjectFuncMap.put(wrapper.valCase, wrapper.toObject); - } - } - private Values() {} - - public static Value ofInt64(long val) { - return Value.newBuilder().setInt64Val(val).build(); - } - - public static Value ofInt32(int val) { - return Value.newBuilder().setInt32Val(val).build(); - } - - public static Value ofFloat(float val) { - return Value.newBuilder().setFloatVal(val).build(); - } - - public static Value ofDouble(double val) { - return Value.newBuilder().setDoubleVal(val).build(); - } - - public static Value ofString(String val) { - return Value.newBuilder().setStringVal(val).build(); - } - - public static Value ofBool(boolean val) { - return (val) ? 
TRUE_BOOL_VAL : FALSE_BOOL_VAL; - } - - public static Value ofTimestamp(Timestamp val) { - return Value.newBuilder().setTimestampVal(val).build(); - } - - public static Value ofTimestamp(DateTime val) { - return ofTimestamp(DateUtil.toTimestamp(val)); - } - - public static Value ofBytes(ByteString val) { - return Value.newBuilder().setBytesVal(val).build(); - } - - public static Value ofBytes(byte[] val) { - return ofBytes(ByteString.copyFrom(val)); - } - - public static ValueType.Enum toValueType(Value value) { - return valCaseToValueTypeMap.getOrDefault(value.getValCase(), Enum.UNKNOWN); - } - - private static ValueType.Enum toValueType(ValCase valCase) { - return valCaseToValueTypeMap.getOrDefault(valCase, Enum.UNKNOWN); - } - - private static Object toObject(Value value) { - return valCaseToObjectFuncMap.get(value.getValCase()).apply(value); - } - - public static Value asType(Value value, ValueType.Enum valueType) throws TypeConversionException { - return asType(value, valueTypeToValCaseMap.get(valueType)); - } - - private static Value asType(Value value, ValCase valCase) throws TypeConversionException { - if (value.getValCase() == valCase) { - return value; - } - try { - switch (valCase) { - case BYTESVAL: - return asBytes(value); - case STRINGVAL: - return asString(value); - case INT32VAL: - return asInt32(value); - case INT64VAL: - return asInt64(value); - case DOUBLEVAL: - return asDouble(value); - case FLOATVAL: - return asFloat(value); - case BOOLVAL: - return asBool(value); - case TIMESTAMPVAL: - return asTimestamp(value); - } - throw new TypeConversionException(); - } catch (UnsupportedOperationException e) { - String message = - String.format( - "Converting not supported from type %s to type %s type", - toValueType(value), toValueType(valCase)); - throw new TypeConversionException(message, e); - } catch (Throwable e) { - String message = - String.format( - "Exception while converting from type %s to type %s type", - toValueType(value), toValueType(valCase)); - throw new TypeConversionException(message, e); - } - } - - public static Value asString(Value value) { - switch (value.getValCase()) { - case STRINGVAL: - return value; - case TIMESTAMPVAL: - return ofString(DateUtil.toString(value.getTimestampVal())); - case BYTESVAL: - return ofString(Base64.getEncoder().encodeToString(value.getBytesVal().toByteArray())); - default: - return ofString(toObject(value).toString()); - } - } - - public static Value asTimestamp(Value value) { - switch (value.getValCase()) { - case TIMESTAMPVAL: - return value; - case STRINGVAL: - return ofTimestamp(DateUtil.toTimestamp(value.getStringVal())); - default: - throw new UnsupportedOperationException(); - } - } - - public static Value asInt64(Value value) { - switch (value.getValCase()) { - case INT64VAL: - return value; - case INT32VAL: - return ofInt64(value.getInt32Val()); - case STRINGVAL: - return ofInt64(Long.valueOf(value.getStringVal())); - default: - throw new UnsupportedOperationException(); - } - } - - public static Value asFloat(Value value) { - switch (value.getValCase()) { - case FLOATVAL: - return value; - case STRINGVAL: - return ofFloat(Float.valueOf(value.getStringVal())); - case INT32VAL: - return ofFloat((float) value.getInt32Val()); - case DOUBLEVAL: - return ofFloat((float) value.getDoubleVal()); - default: - throw new UnsupportedOperationException(); - } - } - - public static Value asBool(Value value) { - switch (value.getValCase()) { - case BOOLVAL: - return value; - case STRINGVAL: - return 
ofBool(Boolean.valueOf(value.getStringVal())); - case INT32VAL: - int int32Val = value.getInt32Val(); - boolean isInt32One = int32Val == 1; - boolean isInt32Zero = int32Val == 0; - if (!isInt32One && !isInt32Zero) { - throw new IllegalArgumentException( - "Only int32 value of 0 or 1 can be converted to boolean, got: " + int32Val); - } - return ofBool(isInt32One); - case INT64VAL: - long int64Val = value.getInt64Val(); - boolean isInt64One = int64Val == 1; - boolean isInt64Zero = int64Val == 0; - if (!isInt64One && !isInt64Zero) { - throw new IllegalArgumentException( - "Only int64 value of 0 or 1 can be converted to boolean, got: " + int64Val); - } - return ofBool(isInt64One); - case FLOATVAL: - float floatVal = value.getFloatVal(); - boolean isFloatOne = Float.compare(floatVal, 1) == 0; - boolean isFloatZero = Float.compare(floatVal, 0) == 0; - if (!isFloatOne && !isFloatZero) { - throw new IllegalArgumentException( - "Only float value of 0.0 or 1.0 can be converted to boolean, got: " + floatVal); - } - return ofBool(isFloatOne); - case DOUBLEVAL: - double doubleVal = value.getDoubleVal(); - boolean isDoubleOne = Double.compare(doubleVal, 1) == 0; - boolean isDoubleZero = Double.compare(doubleVal, 0) == 0; - if (!isDoubleOne && !isDoubleZero) { - throw new IllegalArgumentException( - "Only double value of 0.0 or 1.0 can be converted to boolean, got: " + doubleVal); - } - return ofBool(isDoubleOne); - default: - throw new UnsupportedOperationException(); - } - } - - public static Value asInt32(Value value) { - switch (value.getValCase()) { - case INT32VAL: - return value; - case STRINGVAL: - return ofInt32(Integer.valueOf(value.getStringVal())); - case INT64VAL: - long longValue = value.getInt64Val(); - if (longValue > Integer.MAX_VALUE || longValue < Integer.MIN_VALUE) { - throw new IllegalArgumentException( - "The int64 value can't be casted to int32: " + longValue); - } - return ofInt32((int) longValue); - default: - throw new UnsupportedOperationException(); - } - } - - public static Value asDouble(Value value) { - switch (value.getValCase()) { - case DOUBLEVAL: - return value; - case FLOATVAL: - return ofDouble(value.getFloatVal()); - case INT32VAL: - return ofDouble(value.getInt32Val()); - case INT64VAL: - return ofDouble(value.getInt64Val()); - case STRINGVAL: - return ofDouble(Double.valueOf(value.getStringVal())); - default: - throw new UnsupportedOperationException(); - } - } - - public static Value asBytes(Value value) { - switch (value.getValCase()) { - case BYTESVAL: - return value; - default: - throw new UnsupportedOperationException(); - } - } - - @AllArgsConstructor - static class Wrapper { - - ValueType.Enum valueType; - ValCase valCase; - SerializableFunction toObject; - } -} diff --git a/ingestion/src/main/java/feast/ingestion/options/ImportJobPipelineOptions.java b/ingestion/src/main/java/feast/ingestion/options/ImportJobPipelineOptions.java deleted file mode 100644 index 25dd62d4abc..00000000000 --- a/ingestion/src/main/java/feast/ingestion/options/ImportJobPipelineOptions.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.ingestion.options; - -import com.google.auto.service.AutoService; -import java.util.Collections; -import org.apache.beam.sdk.options.Default; -import org.apache.beam.sdk.options.Description; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.options.PipelineOptionsRegistrar; -import org.apache.beam.sdk.options.Validation.Required; - -/** - * Options passed to Beam to influence the job's execution environment - */ -public interface ImportJobPipelineOptions extends PipelineOptions { - - @Description("Path to a workspace directory containing importJobSpecs.yaml") - @Required - String getWorkspace(); - - void setWorkspace(String value); - - @Description("If dry run is set, execute up to feature row validation") - @Default.Boolean(false) - Boolean isDryRun(); - - void setDryRun(Boolean value); - - @AutoService(PipelineOptionsRegistrar.class) - class ImportJobPipelineOptionsRegistrar implements PipelineOptionsRegistrar { - - @Override - public Iterable> getPipelineOptions() { - return Collections.singleton(ImportJobPipelineOptions.class); - } - } -} diff --git a/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java b/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java new file mode 100644 index 00000000000..1417d22a4d6 --- /dev/null +++ b/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java @@ -0,0 +1,93 @@ +/* + * Copyright 2018 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package feast.ingestion.options; + +import java.util.List; +import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions; +import org.apache.beam.runners.direct.DirectOptions; +import org.apache.beam.sdk.options.Default; +import org.apache.beam.sdk.options.Description; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.options.Validation.Required; + +/** Options passed to Beam to influence the job's execution environment */ +public interface ImportOptions extends PipelineOptions, DataflowPipelineOptions, DirectOptions { + @Required + @Description( + "JSON string representation of the FeatureSetSpec that the import job will process." + + "FeatureSetSpec follows the format in feast.core.FeatureSet proto." 
+ + "Mutliple FeatureSetSpec can be passed by specifying '--featureSetSpec={...}' multiple times" + + "The conversion of Proto message to JSON should follow this mapping:" + + "https://developers.google.com/protocol-buffers/docs/proto3#json" + + "Please minify and remove all insignificant whitespace such as newline in the JSON string" + + "to prevent error when parsing the options") + List getFeatureSetSpecJson(); + + void setFeatureSetSpecJson(List featureSetSpecJson); + + @Required + @Description( + "JSON string representation of the Store that import job will write FeatureRow to." + + "Store follows the format in feast.core.Store proto." + + "Multiple Store can be passed by specifying '--store={...}' multiple times" + + "The conversion of Proto message to JSON should follow this mapping:" + + "https://developers.google.com/protocol-buffers/docs/proto3#json" + + "Please minify and remove all insignificant whitespace such as newline in the JSON string" + + "to prevent error when parsing the options") + List getStoreJson(); + + void setStoreJson(List storeJson); + + @Description( + "(Optional) Deadletter elements will be written to this BigQuery table." + + "Table spec must follow this format PROJECT_ID:DATASET_ID.PROJECT_ID" + + "The table will be created if not exists.") + String getDeadLetterTableSpec(); + + /** + * @param deadLetterTableSpec (Optional) BigQuery table for storing elements that failed to be + * processed. Table spec must follow this format + * PROJECT_ID:DATASET_ID.PROJECT_ID + */ + void setDeadLetterTableSpec(String deadLetterTableSpec); + + // TODO: expound + @Description( + "MetricsAccumulator exporter type to instantiate." + ) + @Default.String("none") + String getMetricsExporterType(); + + void setMetricsExporterType(String metricsExporterType); + + @Description( + "Host to write the metrics to. Required if the metrics exporter is set to StatsD." + ) + @Default.String("localhost") + String getStatsdHost(); + + void setStatsdHost(String StatsdHost); + + @Description( + "Port on StatsD server to write metrics to. Required if the metrics exporter is set to StatsD." + ) + @Default.Integer(8125) + int getStatsdPort(); + + void setStatsdPort(int StatsdPort); +} diff --git a/ingestion/src/main/java/feast/ingestion/options/JobOptions.java b/ingestion/src/main/java/feast/ingestion/options/JobOptions.java deleted file mode 100644 index c316f749bce..00000000000 --- a/ingestion/src/main/java/feast/ingestion/options/JobOptions.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.ingestion.options; - -import com.fasterxml.jackson.annotation.JsonProperty; -import feast.options.Options; - -/** - * JobOptions are options passed in via the import spec, they are options that dictate certain - * behaviour of the job, they differ from the PipelineOptions in ImportJobOptions which are used to - * influence the execution environment. 
- */ -public class JobOptions implements Options { - - private long sampleLimit; - private boolean coalesceRowsEnabled = true; - private long coalesceRowsDelaySeconds; - private long coalesceRowsTimeoutSeconds; - - - @JsonProperty(value = "sample.limit") - public long getSampleLimit() { - return sampleLimit; - } - - public void setSampleLimit(long sampleLimit) { - this.sampleLimit = sampleLimit; - } - - @JsonProperty(value = "coalesceRows.enabled") - public boolean isCoalesceRowsEnabled() { - return coalesceRowsEnabled; - } - - public void setCoalesceRowsEnabled(boolean coalesceRows) { - this.coalesceRowsEnabled = coalesceRows; - } - - @JsonProperty(value = "coalesceRows.delaySeconds") - public long getCoalesceRowsDelaySeconds() { - return coalesceRowsDelaySeconds; - } - - public void setCoalesceRowsDelaySeconds(long coalesceRowsDelaySeconds) { - this.coalesceRowsDelaySeconds = coalesceRowsDelaySeconds; - } - - @JsonProperty(value = "coalesceRows.timeoutSeconds") - public long getCoalesceRowsTimeoutSeconds() { - return coalesceRowsTimeoutSeconds; - } - - public void setCoalesceRowsTimeoutSeconds(long coalesceRowsTimeoutSeconds) { - this.coalesceRowsTimeoutSeconds = coalesceRowsTimeoutSeconds; - } - -} diff --git a/ingestion/src/main/java/feast/ingestion/transform/BaseStoreTransform.java b/ingestion/src/main/java/feast/ingestion/transform/BaseStoreTransform.java deleted file mode 100644 index 66e45937324..00000000000 --- a/ingestion/src/main/java/feast/ingestion/transform/BaseStoreTransform.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.transform; - -import com.google.common.base.Preconditions; -import com.google.inject.Inject; -import feast.ingestion.metrics.FeastMetrics; -import feast.ingestion.model.Specs; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.store.FeatureStoreFactory; -import feast.store.FeatureStoreWrite; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import java.util.List; -import lombok.extern.slf4j.Slf4j; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PDone; - -@Slf4j -public class BaseStoreTransform extends PTransform, PDone> { - - private List stores; - private StorageSpec storageSpec; - private Specs specs; - - @Inject - public BaseStoreTransform(List stores, StorageSpec storageSpec, - Specs specs) { - this.stores = stores; - this.storageSpec = storageSpec; - this.specs = specs; - } - - @Override - public PDone expand(PCollection input) { - FeatureStoreWrite write = null; - for (FeatureStoreFactory factory : stores) { - log.info("Checking factory {} vs storageSpec {}", factory.getType(), storageSpec); - if (factory.getType().equals(storageSpec.getType())) { - write = factory.create(storageSpec, specs); - } - } - Preconditions.checkNotNull(write, - "Store %s with type %s not supported", - storageSpec.getId(), - storageSpec.getType()); - - input.apply("metrics.store." + storageSpec.getId(), - ParDo.of(FeastMetrics.incrDoFn("stored"))); - - return input.apply( - String.format("Write %s %s", storageSpec.getId(), storageSpec.getType()), - write); - } -} diff --git a/ingestion/src/main/java/feast/ingestion/transform/CoalesceFeatureRowExtended.java b/ingestion/src/main/java/feast/ingestion/transform/CoalesceFeatureRowExtended.java deleted file mode 100644 index 1ed84857b11..00000000000 --- a/ingestion/src/main/java/feast/ingestion/transform/CoalesceFeatureRowExtended.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package feast.ingestion.transform; - -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import feast.types.FeatureRowProto.FeatureRow; -import lombok.AllArgsConstructor; -import org.apache.beam.sdk.transforms.MapElements; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.TypeDescriptor; - -@AllArgsConstructor -public class CoalesceFeatureRowExtended extends - PTransform, PCollection> { - - private long delaySeconds; - private long timeoutSeconds; - - @Override - public PCollection expand(PCollection input) { - return input - .apply(MapElements.into(TypeDescriptor.of(FeatureRow.class)) - .via(FeatureRowExtended::getRow)) - .apply(new CoalesceFeatureRows(delaySeconds, timeoutSeconds)) - .apply(new ToFeatureRowExtended()); - } -} diff --git a/ingestion/src/main/java/feast/ingestion/transform/CoalesceFeatureRows.java b/ingestion/src/main/java/feast/ingestion/transform/CoalesceFeatureRows.java deleted file mode 100644 index aa8a1754c2f..00000000000 --- a/ingestion/src/main/java/feast/ingestion/transform/CoalesceFeatureRows.java +++ /dev/null @@ -1,293 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.transform; - -import com.google.common.base.Preconditions; -import com.google.protobuf.util.Timestamps; -import feast.types.FeatureProto.Feature; -import feast.types.FeatureRowProto.FeatureRow; -import feast_ingestion.types.CoalesceAccumProto.CoalesceAccum; -import feast_ingestion.types.CoalesceKeyProto.CoalesceKey; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import lombok.AllArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.apache.beam.sdk.coders.KvCoder; -import org.apache.beam.sdk.extensions.protobuf.ProtoCoder; -import org.apache.beam.sdk.state.BagState; -import org.apache.beam.sdk.state.StateSpec; -import org.apache.beam.sdk.state.StateSpecs; -import org.apache.beam.sdk.state.TimeDomain; -import org.apache.beam.sdk.state.Timer; -import org.apache.beam.sdk.state.TimerSpec; -import org.apache.beam.sdk.state.TimerSpecs; -import org.apache.beam.sdk.state.ValueState; -import org.apache.beam.sdk.transforms.Combine; -import org.apache.beam.sdk.transforms.DoFn; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.transforms.SerializableFunction; -import org.apache.beam.sdk.transforms.Values; -import org.apache.beam.sdk.transforms.WithKeys; -import org.apache.beam.sdk.transforms.windowing.AfterProcessingTime; -import org.apache.beam.sdk.transforms.windowing.Window; -import org.apache.beam.sdk.values.KV; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PCollection.IsBounded; -import org.apache.beam.sdk.values.TypeDescriptor; -import org.joda.time.Duration; -import org.joda.time.Instant; - -/** - * Takes FeatureRow, and merges them if they have the same FeatureRowKey, so that the latest values - * will be emitted. It emits only once for batch. - * - *

For streaming we emits after a delay of 10 seconds (event time) by default we keep the - * previous state around for merging with future events. These timeout after 30 minutes by default. - */ -public class CoalesceFeatureRows - extends PTransform, PCollection> { - - private static final SerializableFunction KEY_FUNCTION = - (row) -> - CoalesceKey.newBuilder() - .setEntityName(row.getEntityName()) - .setEntityKey(row.getEntityKey()) - .build(); - - private static final Duration DEFAULT_DELAY = Duration.standardSeconds(10); - private static final Duration DEFAULT_TIMEOUT = Duration.ZERO; - - private Duration delay; - private Duration timeout; - - CoalesceFeatureRows() { - this(0, 0); - } - - public CoalesceFeatureRows(long delaySeconds, long timeoutSeconds) { - this(Duration.standardSeconds(delaySeconds), Duration.standardSeconds(timeoutSeconds)); - } - - public CoalesceFeatureRows(Duration delay, Duration timeout) { - this.delay = (delay.isEqual(Duration.ZERO)) ? DEFAULT_DELAY : delay; - this.timeout = (timeout.isEqual(Duration.ZERO)) ? DEFAULT_TIMEOUT : timeout; - } - - /** Return a FeatureRow of the new features accumulated since the given timestamp */ - public static FeatureRow toFeatureRow(CoalesceAccum accum, long counter) { - Preconditions.checkArgument( - counter <= accum.getCounter(), - "Accumulator has no features at or newer than the provided counter"); - FeatureRow.Builder builder = - FeatureRow.newBuilder() - .setEntityName(accum.getEntityName()) - .setEntityKey(accum.getEntityKey()); - // This will be the latest timestamp - if (accum.hasEventTimestamp()) { - builder.setEventTimestamp(accum.getEventTimestamp()); - } - - Map features = accum.getFeaturesMap(); - if (counter <= 0) { - builder.addAllFeatures(features.values()); - } else { - List featureList = - accum - .getFeatureMarksMap() - .entrySet() - .stream() - .filter((e) -> e.getValue() > counter) - .map((e) -> features.get(e.getKey())) - .collect(Collectors.toList()); - builder.addAllFeatures(featureList); - } - return builder.build(); - } - - public static FeatureRow combineFeatureRows(Iterable rows) { - return toFeatureRow(combineFeatureRows(CoalesceAccum.getDefaultInstance(), rows), 0); - } - - public static CoalesceAccum combineFeatureRows(CoalesceAccum seed, Iterable rows) { - CoalesceAccum.Builder accum = seed.toBuilder(); - Map features = new HashMap<>(); - Map featureMarks = new HashMap<>(); - long rowCount = seed.getCounter(); - for (FeatureRow row : rows) { - rowCount += 1; - if (Timestamps.compare(accum.getEventTimestamp(), row.getEventTimestamp()) <= 0) { - // row has later timestamp than accum. - for (Feature feature : row.getFeaturesList()) { - features.put(feature.getId(), feature); - // These marks are used to determine which features are new when we convert an accum - // back into a FeatureRow. - featureMarks.put(feature.getId(), rowCount); - } - accum.setEntityName(row.getEntityName()); - accum.setEntityKey(row.getEntityKey()); - if (row.hasEventTimestamp()) { - accum.setEventTimestamp(row.getEventTimestamp()); - } - } else { - for (Feature feature : row.getFeaturesList()) { - String featureId = feature.getId(); - // only insert an older feature if there was no newer one. 
- if (!features.containsKey(featureId)) { - features.put(featureId, feature); - featureMarks.put(feature.getId(), rowCount); - } - } - } - } - if (rowCount == seed.getCounter()) { - return seed; - } else { - return accum - .setCounter(rowCount) - .putAllFeatures(features) - .putAllFeatureMarks(featureMarks) - .build(); - } - } - - @Override - public PCollection expand(PCollection input) { - PCollection> kvs = - input - .apply(WithKeys.of(KEY_FUNCTION).withKeyType(TypeDescriptor.of(CoalesceKey.class))) - .setCoder( - KvCoder.of(ProtoCoder.of(CoalesceKey.class), ProtoCoder.of(FeatureRow.class))); - - if (kvs.isBounded().equals(IsBounded.UNBOUNDED)) { - return kvs.apply( - "Configure window", - Window.>configure() - .withAllowedLateness(Duration.ZERO) - .discardingFiredPanes() - .triggering(AfterProcessingTime.pastFirstElementInPane())) - .apply(ParDo.of(new CombineStateDoFn(delay, timeout))) - .apply(Values.create()); - } else { - return kvs.apply(Combine.perKey(CoalesceFeatureRows::combineFeatureRows)) - .apply(Values.create()); - } - } - - @Slf4j - @AllArgsConstructor - public static class CombineStateDoFn - extends DoFn, KV> { - - @StateId("lastKnownAccumValue") - private final StateSpec> lastKnownAccumValueSpecs = - StateSpecs.value(ProtoCoder.of(CoalesceAccum.class)); - - @StateId("newElementsBag") - private final StateSpec> newElementsBag = - StateSpecs.bag(ProtoCoder.of(FeatureRow.class)); - - @StateId("lastTimerTimestamp") - private final StateSpec> lastTimerTimestamp = StateSpecs.value(); - - @TimerId("bufferTimer") - private final TimerSpec bufferTimer = TimerSpecs.timer(TimeDomain.EVENT_TIME); - - @TimerId("timeoutTimer") - private final TimerSpec timeoutTimer = TimerSpecs.timer(TimeDomain.EVENT_TIME); - - private Duration delay; - private Duration timeout; - - @ProcessElement - public void processElement( - ProcessContext context, - @StateId("newElementsBag") BagState newElementsBag, - @TimerId("bufferTimer") Timer bufferTimer, - @TimerId("timeoutTimer") Timer timeoutTimer, - @StateId("lastTimerTimestamp") ValueState lastTimerTimestampValue) { - newElementsBag.add(context.element().getValue()); - log.debug("Adding FeatureRow to state bag {}", context.element()); - - Instant lastTimerTimestamp = lastTimerTimestampValue.read(); - Instant contextTimestamp = context.timestamp(); - if (lastTimerTimestamp == null && timeout.isLongerThan(Duration.ZERO)) { - // We never timeout the state if a timeout is not set. 
- timeoutTimer.offset(timeout).setRelative(); - } - if (lastTimerTimestamp == null - || lastTimerTimestamp.isBefore(contextTimestamp) - || lastTimerTimestamp.equals(contextTimestamp)) { - lastTimerTimestamp = context.timestamp().plus(delay); - log.debug("Setting timer for key {} to {}", context.element().getKey(), lastTimerTimestamp); - lastTimerTimestampValue.write(lastTimerTimestamp); - bufferTimer.offset(delay).setRelative(); - } - } - - @OnTimer("bufferTimer") - public void bufferOnTimer( - OnTimerContext context, - OutputReceiver> out, - @StateId("newElementsBag") BagState newElementsBag, - @StateId("lastKnownAccumValue") ValueState lastKnownAccumValue) { - log.debug("bufferOnTimer triggered {}", context.timestamp()); - flush(out, newElementsBag, lastKnownAccumValue); - } - - @OnTimer("timeoutTimer") - public void timeoutOnTimer( - OnTimerContext context, - OutputReceiver> out, - @StateId("newElementsBag") BagState newElementsBag, - @StateId("lastKnownAccumValue") ValueState lastKnownAccumValue) { - log.debug("timeoutOnTimer triggered {}", context.timestamp()); - flush(out, newElementsBag, lastKnownAccumValue); - newElementsBag.clear(); - lastKnownAccumValue.clear(); - } - - public void flush( - OutputReceiver> out, - @StateId("newElementsBag") BagState newElementsBag, - @StateId("lastKnownAccumValue") ValueState lastKnownAccumValue) { - log.debug("Flush triggered"); - Iterable rows = newElementsBag.read(); - if (!rows.iterator().hasNext()) { - log.debug("Flush with no new elements"); - return; - } - CoalesceAccum lastKnownAccum = lastKnownAccumValue.read(); - if (lastKnownAccum == null) { - lastKnownAccum = CoalesceAccum.getDefaultInstance(); - } - // Check if we have more than one value in our list. - CoalesceAccum accum = combineFeatureRows(lastKnownAccum, rows); - FeatureRow row = toFeatureRow(accum, lastKnownAccum.getCounter()); - log.debug("Timer fired and added FeatureRow to output {}", row); - // Clear the elements now that they have been processed - newElementsBag.clear(); - lastKnownAccumValue.write(accum); - - // Output the value stored in the the processed que which matches this timers time - out.output(KV.of(KEY_FUNCTION.apply(row), row)); - } - } -} diff --git a/ingestion/src/main/java/feast/ingestion/transform/ErrorsStoreTransform.java b/ingestion/src/main/java/feast/ingestion/transform/ErrorsStoreTransform.java deleted file mode 100644 index c33316fc3fa..00000000000 --- a/ingestion/src/main/java/feast/ingestion/transform/ErrorsStoreTransform.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.transform; - -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import com.google.inject.Inject; -import feast.ingestion.model.Specs; -import feast.ingestion.options.ImportJobPipelineOptions; -import feast.ingestion.util.PathUtil; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.store.errors.FeatureErrorsFactory; -import feast.store.errors.json.JsonFileErrorsFactory; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; -import lombok.extern.slf4j.Slf4j; - - -@Slf4j -public class ErrorsStoreTransform extends BaseStoreTransform { - - @Inject - public ErrorsStoreTransform( - List errorsStoreFactories, Specs specs, - ImportJobPipelineOptions options) { - super(errorsStoreFactories, getErrorStoreSpec(specs, options), specs); - } - - public static StorageSpec getErrorStoreSpec(Specs specs, ImportJobPipelineOptions options) { - String workspace = options.getWorkspace(); - StorageSpec errorsStoreSpec = specs.getErrorsStoreSpec(); - if (Strings.isNullOrEmpty(errorsStoreSpec.getType())) { - Preconditions.checkArgument(!Strings.isNullOrEmpty(workspace), "Workspace must be provided"); - Path workspaceErrorsPath = PathUtil.getPath(workspace).resolve("errors"); - try { - Files.createDirectory(workspaceErrorsPath); - } catch (IOException e) { - log.error("Could not intialise workspace errors directory {}", workspaceErrorsPath); - throw new RuntimeException(e); - } - errorsStoreSpec = StorageSpec.newBuilder() - .setId("ERRORS") - .setType(JsonFileErrorsFactory.JSON_FILES_TYPE) - .putOptions("path", workspaceErrorsPath.toString()).build(); - } - return errorsStoreSpec; - } -} diff --git a/ingestion/src/main/java/feast/ingestion/transform/ReadFeaturesTransform.java b/ingestion/src/main/java/feast/ingestion/transform/ReadFeaturesTransform.java deleted file mode 100644 index 15dc223cc2f..00000000000 --- a/ingestion/src/main/java/feast/ingestion/transform/ReadFeaturesTransform.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.transform; - -import com.google.common.base.Preconditions; -import com.google.inject.Inject; -import feast.ingestion.model.Specs; -import feast.source.FeatureSourceFactory; -import feast.source.FeatureSourceFactoryService; -import feast.specs.ImportSpecProto.ImportSpec; -import feast.types.FeatureRowProto.FeatureRow; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PInput; - -public class ReadFeaturesTransform extends PTransform> { - - private ImportSpec importSpec; - - @Inject - public ReadFeaturesTransform(Specs specs) { - this.importSpec = specs.getImportSpec(); - } - - @Override - public PCollection expand(PInput input) { - return input.getPipeline().apply("Read " + importSpec.getType(), getTransform()); - } - - public PTransform> getTransform() { - String type = importSpec.getType(); - Preconditions.checkArgument(!type.isEmpty(), "type missing in import spec"); - - FeatureSourceFactory featureSourceFactory = null; - for (FeatureSourceFactory factory : FeatureSourceFactoryService.getAll()) { - if (type.equals(factory.getType())) { - featureSourceFactory = factory; - } - } - Preconditions - .checkNotNull(featureSourceFactory, "No FeatureSourceFactory found for type " + type); - return featureSourceFactory.create(importSpec); - } -} - diff --git a/ingestion/src/main/java/feast/ingestion/transform/ReadFromSource.java b/ingestion/src/main/java/feast/ingestion/transform/ReadFromSource.java new file mode 100644 index 00000000000..012823f84f6 --- /dev/null +++ b/ingestion/src/main/java/feast/ingestion/transform/ReadFromSource.java @@ -0,0 +1,91 @@ +package feast.ingestion.transform; + +import com.google.auto.value.AutoValue; +import com.google.common.base.Preconditions; +import com.google.common.collect.Lists; +import feast.core.SourceProto.Source; +import feast.core.SourceProto.SourceType; +import feast.ingestion.transform.fn.KafkaRecordToFeatureRowDoFn; +import feast.ingestion.values.FailedElement; +import feast.types.FeatureRowProto.FeatureRow; +import java.util.List; +import java.util.Map; +import org.apache.beam.sdk.io.kafka.KafkaIO; +import org.apache.beam.sdk.transforms.PTransform; +import org.apache.beam.sdk.transforms.ParDo; +import org.apache.beam.sdk.values.PBegin; +import org.apache.beam.sdk.values.PCollectionTuple; +import org.apache.beam.sdk.values.TupleTag; +import org.apache.beam.sdk.values.TupleTagList; +import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap; + +@AutoValue +public abstract class ReadFromSource extends PTransform { + + public abstract Source getSource(); + + public abstract Map> getFeatureSetTagByKey(); + + public abstract TupleTag getFailureTag(); + + public static Builder newBuilder() { + return new AutoValue_ReadFromSource.Builder(); + } + + @AutoValue.Builder + public abstract static class Builder { + + public abstract Builder setSource(Source source); + + public abstract Builder setFeatureSetTagByKey( + Map> featureSetTagByKey); + + public abstract Builder setFailureTag(TupleTag failureTag); + + abstract ReadFromSource autobuild(); + + public ReadFromSource build() { + ReadFromSource read = autobuild(); + Source source = read.getSource(); + Preconditions.checkState( + source.getType().equals(SourceType.KAFKA), + "Source type must be KAFKA. 
Please raise an issue in https://github.com/gojek/feast/issues to request additional source types."); + Preconditions.checkState( + !source.getKafkaSourceConfig().getBootstrapServers().isEmpty(), + "bootstrap_servers cannot be empty."); + Preconditions.checkState( + !source.getKafkaSourceConfig().getTopic().isEmpty(), "topic cannot be empty."); + return read; + } + } + + @Override + public PCollectionTuple expand(PBegin input) { + return input + .getPipeline() + .apply( + "ReadFromKafka", + KafkaIO.readBytes() + .withBootstrapServers(getSource().getKafkaSourceConfig().getBootstrapServers()) + .withTopic(getSource().getKafkaSourceConfig().getTopic()) + .withConsumerConfigUpdates( + ImmutableMap.of( + "group.id", + generateConsumerGroupId(input.getPipeline().getOptions().getJobName()))) + .withReadCommitted() + .commitOffsetsInFinalize()) + .apply( + "KafkaRecordToFeatureRow", ParDo.of(KafkaRecordToFeatureRowDoFn.newBuilder() + .setFeatureSetTagByKey(getFeatureSetTagByKey()) + .setFailureTag(getFailureTag()) + .build()) + .withOutputTags(new TupleTag("placeholder") {}, + TupleTagList.of(Lists + .newArrayList(getFeatureSetTagByKey().values())) + .and(getFailureTag()))); + } + + private String generateConsumerGroupId(String jobName) { + return "feast_import_job_" + jobName; + } +} diff --git a/ingestion/src/main/java/feast/ingestion/transform/ServingStoreTransform.java b/ingestion/src/main/java/feast/ingestion/transform/ServingStoreTransform.java deleted file mode 100644 index 5299fe12df0..00000000000 --- a/ingestion/src/main/java/feast/ingestion/transform/ServingStoreTransform.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.ingestion.transform; - -import com.google.inject.Inject; -import feast.ingestion.model.Specs; -import feast.store.serving.FeatureServingFactory; -import java.util.List; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -public class ServingStoreTransform extends BaseStoreTransform { - - @Inject - public ServingStoreTransform(List stores, Specs specs) { - super(stores, specs.getServingStorageSpec(), specs); - } -} diff --git a/ingestion/src/main/java/feast/ingestion/transform/SplitFeatures.java b/ingestion/src/main/java/feast/ingestion/transform/SplitFeatures.java deleted file mode 100644 index 8a2cffe3325..00000000000 --- a/ingestion/src/main/java/feast/ingestion/transform/SplitFeatures.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.ingestion.transform; - -import com.google.common.collect.Maps; -import java.io.Serializable; -import java.util.HashMap; -import lombok.Getter; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.transforms.SerializableFunction; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PCollectionTuple; -import org.apache.beam.sdk.values.TupleTag; -import org.apache.beam.sdk.values.TupleTagList; -import feast.ingestion.model.Specs; -import feast.ingestion.transform.fn.SplitFeaturesDoFn; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; - -public class SplitFeatures { - - @Getter - public static class SingleOutputSplit - extends PTransform, PCollection> { - - private SplitStrategy strategy; - private Specs specs; - - public SingleOutputSplit(SerializableFunction splitFn, Specs specs) { - this.strategy = new SplitStrategy<>(splitFn); - this.specs = specs; - } - - @Override - public PCollection expand(PCollection input) { - return input.apply(ParDo.of(new SplitFeaturesDoFn<>(strategy, specs))); - } - } - - @Getter - public static class MultiOutputSplit - extends PTransform, PCollectionTuple> { - - public static TupleTag MAIN_TAG = new TupleTag() {}; - private SplitStrategy strategy; - private Specs specs; - - public MultiOutputSplit( - SerializableFunction splitFn, Iterable keys, Specs specs) { - this.strategy = new SplitStrategy<>(splitFn, keys); - this.specs = specs; - } - - @Override - public PCollectionTuple expand(PCollection input) { - return input.apply( - ParDo.of(new SplitFeaturesDoFn<>(strategy, specs)) - .withOutputTags(MAIN_TAG, strategy.getTags())); - } - } - - public static class SplitStrategy implements Serializable { - - private SerializableFunction splitFn; - private HashMap> tagMap = Maps.newHashMap(); - - public SplitStrategy(SerializableFunction splitFn, Iterable keys) { - this.splitFn = splitFn; - for (T key : keys) { - String tagId = getTagId(key); - tagMap.put(tagId, new TupleTag(tagId) {}); - } - } - - public SplitStrategy(SerializableFunction splitFn) { - this.splitFn = splitFn; - } - - public TupleTagList getTags() { - TupleTagList list = TupleTagList.empty(); - for (TupleTag tag : tagMap.values()) { - list = list.and(tag); - } - return list; - } - - public TupleTag getTag(FeatureSpec featureInfo) { - return getTag(splitFn.apply(featureInfo)); - } - - public TupleTag getTag(T key) { - return new TupleTag(getTagId(key)) {}; - } - - private String getTagId(T key) { - return key.toString(); - } - - public boolean isOutputTag(TupleTag tag) { - return tagMap.containsKey(tag.getId()); - } - } -} diff --git a/ingestion/src/main/java/feast/ingestion/transform/SplitOutputByStore.java b/ingestion/src/main/java/feast/ingestion/transform/SplitOutputByStore.java deleted file mode 100644 index ffb1393698e..00000000000 --- a/ingestion/src/main/java/feast/ingestion/transform/SplitOutputByStore.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.ingestion.transform; - -import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; -import feast.ingestion.model.Specs; -import feast.ingestion.transform.SplitFeatures.MultiOutputSplit; -import feast.ingestion.values.PFeatureRows; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.store.FeatureStoreFactory; -import feast.store.FeatureStoreWrite; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import lombok.AllArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.apache.beam.sdk.transforms.Flatten; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.transforms.SerializableFunction; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PCollectionList; -import org.apache.beam.sdk.values.PCollectionTuple; -import org.apache.beam.sdk.values.TupleTag; - -@AllArgsConstructor -@Slf4j -public class SplitOutputByStore extends PTransform { - - private Collection stores; - private SerializableFunction selector; - private Specs specs; - private Map storageSpecs; - - @Override - public PFeatureRows expand(PFeatureRows input) { - Map transforms = getFeatureStoreTransforms(); - Set keys = transforms.keySet(); - - log.info(String.format("Splitting on keys = [%s]", String.join(",", keys))); - MultiOutputSplit splitter = new MultiOutputSplit<>(selector, keys, specs); - PCollectionTuple splits = input.getMain().apply(splitter); - - Map, FeatureStoreWrite> taggedTransforms = new HashMap<>(); - for (String key : transforms.keySet()) { - TupleTag tag = splitter.getStrategy().getTag(key); - taggedTransforms.put(tag, transforms.get(key)); - } - - PCollection written = splits - .apply(new WriteTags(taggedTransforms, MultiOutputSplit.MAIN_TAG)); - return new PFeatureRows( - written, - input.getErrors()); - } - - private Map getStoresMap() { - Map storesMap = new HashMap<>(); - for (FeatureStoreFactory servingStore : stores) { - storesMap.put(servingStore.getType(), servingStore); - } - return storesMap; - } - - private Map getFeatureStoreTransforms() { - Map storesMap = getStoresMap(); - Map transforms = new HashMap<>(); - for (String storeId : storageSpecs.keySet()) { - StorageSpec storageSpec = storageSpecs.get(storeId); - String type = storageSpec.getType(); - if (storesMap.containsKey(type)) { - transforms.put(storeId, storesMap.get(type).create(storageSpec, specs)); - } - } - return transforms; - } - - /** - * Writes each pcollection in the tuple to a correspondingly tagged write transform and returns - * the a union of the written rows. - * - * The main tag, is not written to a transform, but is returned. This represents the default for - * rows which have no associated store to write to but might want to be written down stream (eg, - * it has no serving store, but does have a warehouse store). 
- * - * Tag in the tuple that are not the main tag and have no transform, will be discarded - * completely. - */ - @AllArgsConstructor - public static class WriteTags extends - PTransform> { - - private Map, FeatureStoreWrite> transforms; - private TupleTag mainTag; - - @Override - public PCollection expand(PCollectionTuple tuple) { - List> outputList = Lists.newArrayList(); - for (TupleTag tag : transforms.keySet()) { - FeatureStoreWrite write = transforms.get(tag); - Preconditions.checkNotNull(write, String.format("Null transform for tag=%s", tag.getId())); - PCollection input = tuple.get(tag); - input.apply(String.format("Write to %s", tag.getId()), write); - outputList.add(input); - } - // FeatureRows with no matching write transform end up in `input.get(mainTag)` and considered - // discardible, we return them in the main output so they are considered written, but don't - // actually write them to any store. - outputList.add(tuple.get(mainTag)); - return PCollectionList.of(outputList).apply("Flatten main", Flatten.pCollections()); - } - } -} diff --git a/ingestion/src/main/java/feast/ingestion/transform/ToFeatureRowExtended.java b/ingestion/src/main/java/feast/ingestion/transform/ToFeatureRowExtended.java deleted file mode 100644 index 0ff6f5f616f..00000000000 --- a/ingestion/src/main/java/feast/ingestion/transform/ToFeatureRowExtended.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.transform; - -import feast.ingestion.util.DateUtil; -import feast.types.FeatureRowExtendedProto.Attempt; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import feast.types.FeatureRowProto.FeatureRow; -import org.apache.beam.sdk.transforms.DoFn; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.values.PCollection; -import org.joda.time.DateTime; - -public class ToFeatureRowExtended extends - PTransform, PCollection> { - - @Override - public PCollection expand(PCollection input) { - return input.apply(ParDo.of(new DoFn() { - @ProcessElement - public void processElement(@Element FeatureRow row, OutputReceiver out) { - out.output(FeatureRowExtended.newBuilder() - .setRow(row) - .setFirstSeen(DateUtil.toTimestamp(DateTime.now())) - .setLastAttempt(Attempt.newBuilder() - .setAttempts(0) - .build()) - .build()); - } - })); - } -} diff --git a/ingestion/src/main/java/feast/ingestion/transform/ValidateFeatureRows.java b/ingestion/src/main/java/feast/ingestion/transform/ValidateFeatureRows.java new file mode 100644 index 00000000000..b026a4fd523 --- /dev/null +++ b/ingestion/src/main/java/feast/ingestion/transform/ValidateFeatureRows.java @@ -0,0 +1,59 @@ +package feast.ingestion.transform; + +import com.google.auto.value.AutoValue; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.ingestion.transform.fn.ValidateFeatureRowDoFn; +import feast.ingestion.utils.SpecUtil; +import feast.ingestion.values.FailedElement; +import feast.ingestion.values.Field; +import feast.types.FeatureRowProto.FeatureRow; +import java.util.Map; +import org.apache.beam.sdk.transforms.PTransform; +import org.apache.beam.sdk.transforms.ParDo; +import org.apache.beam.sdk.values.PCollection; +import org.apache.beam.sdk.values.PCollectionTuple; +import org.apache.beam.sdk.values.TupleTag; +import org.apache.beam.sdk.values.TupleTagList; + +@AutoValue +public abstract class ValidateFeatureRows extends + PTransform, PCollectionTuple> { + + public abstract FeatureSetSpec getFeatureSetSpec(); + + public abstract TupleTag getSuccessTag(); + + public abstract TupleTag getFailureTag(); + + public static Builder newBuilder() { + return new AutoValue_ValidateFeatureRows.Builder(); + } + + @AutoValue.Builder + public abstract static class Builder { + + public abstract Builder setFeatureSetSpec(FeatureSetSpec featureSetSpec); + + public abstract Builder setSuccessTag(TupleTag successTag); + + public abstract Builder setFailureTag(TupleTag failureTag); + + public abstract ValidateFeatureRows build(); + } + + @Override + public PCollectionTuple expand(PCollection input) { + Map fieldsByName = SpecUtil + .getFieldByName(getFeatureSetSpec()); + + return input.apply("ValidateFeatureRows", + ParDo.of(ValidateFeatureRowDoFn.newBuilder() + .setFeatureSetName(getFeatureSetSpec().getName()) + .setFeatureSetVersion(getFeatureSetSpec().getVersion()) + .setFieldByName(fieldsByName) + .setSuccessTag(getSuccessTag()) + .setFailureTag(getFailureTag()) + .build()) + .withOutputTags(getSuccessTag(), TupleTagList.of(getFailureTag()))); + } +} diff --git a/ingestion/src/main/java/feast/ingestion/transform/ValidateTransform.java b/ingestion/src/main/java/feast/ingestion/transform/ValidateTransform.java deleted file mode 100644 index bc9cc6ee8dc..00000000000 --- a/ingestion/src/main/java/feast/ingestion/transform/ValidateTransform.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed 
under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.ingestion.transform; - -import org.apache.beam.sdk.transforms.PTransform; -import feast.ingestion.model.Specs; -import feast.ingestion.transform.fn.ValidateFeatureRowsDoFn; -import feast.ingestion.values.PFeatureRows; -import feast.specs.ImportSpecProto.Field; -import feast.specs.ImportSpecProto.ImportSpec; - -public class ValidateTransform extends PTransform { - - private Specs specs; - - public ValidateTransform(Specs specs) { - this.specs = specs; - } - - @Override - public PFeatureRows expand(PFeatureRows input) { - ImportSpec importSpec = specs.getImportSpec(); - for (String entity : importSpec.getEntitiesList()) { - specs.getEntitySpec(entity); - } - for (Field field : importSpec.getSchema().getFieldsList()) { - if (!field.getFeatureId().isEmpty()) { - specs.getFeatureSpec(field.getFeatureId()); - } - } - return input.applyDoFn("Validate feature rows dofn", new ValidateFeatureRowsDoFn(specs)); - } -} diff --git a/ingestion/src/main/java/feast/ingestion/transform/WarehouseStoreTransform.java b/ingestion/src/main/java/feast/ingestion/transform/WarehouseStoreTransform.java deleted file mode 100644 index e06c954c499..00000000000 --- a/ingestion/src/main/java/feast/ingestion/transform/WarehouseStoreTransform.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - - -package feast.ingestion.transform; - -import com.google.inject.Inject; -import feast.ingestion.model.Specs; -import feast.store.serving.FeatureServingFactory; -import feast.store.warehouse.FeatureWarehouseFactory; -import java.util.List; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -public class WarehouseStoreTransform extends BaseStoreTransform { - - @Inject - public WarehouseStoreTransform(List stores, Specs specs) { - super(stores, specs.getWarehouseStorageSpec(), specs); - } -} diff --git a/ingestion/src/main/java/feast/ingestion/transform/WriteFailedElementToBigQuery.java b/ingestion/src/main/java/feast/ingestion/transform/WriteFailedElementToBigQuery.java new file mode 100644 index 00000000000..fc66a8afaec --- /dev/null +++ b/ingestion/src/main/java/feast/ingestion/transform/WriteFailedElementToBigQuery.java @@ -0,0 +1,70 @@ +package feast.ingestion.transform; + +import com.google.api.services.bigquery.model.TableRow; +import com.google.auto.value.AutoValue; +import feast.ingestion.values.FailedElement; +import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO; +import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition; +import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition; +import org.apache.beam.sdk.io.gcp.bigquery.WriteResult; +import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.transforms.PTransform; +import org.apache.beam.sdk.transforms.ParDo; +import org.apache.beam.sdk.values.PCollection; + +@AutoValue +public abstract class WriteFailedElementToBigQuery + extends PTransform, WriteResult> { + public abstract String getTableSpec(); + + public abstract String getJsonSchema(); + + public static Builder newBuilder() { + return new AutoValue_WriteFailedElementToBigQuery.Builder(); + } + + @AutoValue.Builder + public abstract static class Builder { + + /** + * @param tableSpec Table spec should follow the format "PROJECT_ID:DATASET_ID.TABLE_ID". The table will be created if it does not exist. + */ + public abstract Builder setTableSpec(String tableSpec); + + /** + * @param jsonSchema JSON string describing the schema of the table.
+ */ + public abstract Builder setJsonSchema(String jsonSchema); + + public abstract WriteFailedElementToBigQuery build(); + } + + @Override + public WriteResult expand(PCollection failedElements) { + return failedElements + .apply("FailedElementToTableRow", ParDo.of(new FailedElementToTableRowFn())) + .apply( + "WriteFailedElementsToBigQuery", + BigQueryIO.writeTableRows() + .to(getTableSpec()) + .withJsonSchema(getJsonSchema()) + .withCreateDisposition(CreateDisposition.CREATE_IF_NEEDED) + .withWriteDisposition(WriteDisposition.WRITE_APPEND)); + } + + public static class FailedElementToTableRowFn extends DoFn { + @ProcessElement + public void processElement(ProcessContext context) { + final FailedElement element = context.element(); + final TableRow tableRow = + new TableRow() + .set("timestamp", element.getTimestamp().toString()) + .set("job_name", element.getJobName()) + .set("transform_name", element.getTransformName()) + .set("payload", element.getPayload()) + .set("error_message", element.getErrorMessage()) + .set("stack_trace", element.getStackTrace()); + context.output(tableRow); + } + } +} diff --git a/ingestion/src/main/java/feast/ingestion/transform/WriteToStore.java b/ingestion/src/main/java/feast/ingestion/transform/WriteToStore.java new file mode 100644 index 00000000000..81520107973 --- /dev/null +++ b/ingestion/src/main/java/feast/ingestion/transform/WriteToStore.java @@ -0,0 +1,140 @@ +package feast.ingestion.transform; + +import com.google.api.services.bigquery.model.TableDataInsertAllResponse.InsertErrors; +import com.google.api.services.bigquery.model.TableRow; +import com.google.api.services.bigquery.model.TimePartitioning; +import com.google.auto.value.AutoValue; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.StoreProto.Store; +import feast.core.StoreProto.Store.BigQueryConfig; +import feast.core.StoreProto.Store.RedisConfig; +import feast.core.StoreProto.Store.StoreType; +import feast.ingestion.options.ImportOptions; +import feast.ingestion.utils.ResourceUtil; +import feast.ingestion.values.FailedElement; +import feast.store.serving.bigquery.FeatureRowToTableRowDoFn; +import feast.store.serving.redis.FeatureRowToRedisMutationDoFn; +import feast.store.serving.redis.RedisCustomIO; +import feast.types.FeatureRowProto.FeatureRow; +import java.io.IOException; +import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO; +import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition; +import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.Method; +import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition; +import org.apache.beam.sdk.io.gcp.bigquery.BigQueryInsertError; +import org.apache.beam.sdk.io.gcp.bigquery.InsertRetryPolicy; +import org.apache.beam.sdk.io.gcp.bigquery.WriteResult; +import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.transforms.PTransform; +import org.apache.beam.sdk.transforms.ParDo; +import org.apache.beam.sdk.values.PCollection; +import org.apache.beam.sdk.values.PDone; +import org.slf4j.Logger; + +@AutoValue +public abstract class WriteToStore extends PTransform, PDone> { + + private static final Logger log = org.slf4j.LoggerFactory.getLogger(WriteToStore.class); + + public abstract Store getStore(); + + public abstract FeatureSetSpec getFeatureSetSpec(); + + public static Builder newBuilder() { + return new AutoValue_WriteToStore.Builder(); + } + + @AutoValue.Builder + public abstract static class Builder { + public abstract Builder setStore(Store store); + + public 
abstract Builder setFeatureSetSpec(FeatureSetSpec featureSetSpec); + + public abstract WriteToStore build(); + } + + @Override + public PDone expand(PCollection input) { + ImportOptions options = input.getPipeline().getOptions().as(ImportOptions.class); + StoreType storeType = getStore().getType(); + + switch (storeType) { + case REDIS: + RedisConfig redisConfig = getStore().getRedisConfig(); + input + .apply( + "FeatureRowToRedisMutation", + ParDo.of(new FeatureRowToRedisMutationDoFn(getFeatureSetSpec()))) + .apply( + "WriteRedisMutationToRedis", + RedisCustomIO.write(redisConfig.getHost(), redisConfig.getPort())); + break; + case BIGQUERY: + BigQueryConfig bigqueryConfig = getStore().getBigqueryConfig(); + String tableSpec = + String.format( + "%s:%s.%s_v%s", + bigqueryConfig.getProjectId(), + bigqueryConfig.getDatasetId(), + getFeatureSetSpec().getName(), + getFeatureSetSpec().getVersion()); + TimePartitioning timePartitioning = + new TimePartitioning() + .setType("DAY") + .setField(FeatureRowToTableRowDoFn.getEventTimestampColumn()); + + WriteResult bigqueryWriteResult = + input + .apply( + "FeatureRowToTableRow", + ParDo.of(new FeatureRowToTableRowDoFn(options.getJobName()))) + .apply( + "WriteTableRowToBigQuery", + BigQueryIO.writeTableRows() + .to(tableSpec) + .withCreateDisposition(CreateDisposition.CREATE_NEVER) + .withWriteDisposition(WriteDisposition.WRITE_APPEND) + .withExtendedErrorInfo() + .withMethod(Method.STREAMING_INSERTS) + .withFailedInsertRetryPolicy(InsertRetryPolicy.retryTransientErrors()) + .withTimePartitioning(timePartitioning)); + + if (options.getDeadLetterTableSpec() != null) { + bigqueryWriteResult + .getFailedInsertsWithErr() + .apply( + "WrapBigQueryInsertionError", + ParDo.of( + new DoFn() { + @ProcessElement + public void processElement(ProcessContext context) { + InsertErrors error = context.element().getError(); + TableRow row = context.element().getRow(); + try { + context.output( + FailedElement.newBuilder() + .setErrorMessage(error.toPrettyString()) + .setPayload(row.toPrettyString()) + .setJobName(context.getPipelineOptions().getJobName()) + .setTransformName("WriteTableRowToBigQuery") + .build()); + } catch (IOException e) { + log.error(e.getMessage()); + } + } + })) + .apply( + WriteFailedElementToBigQuery.newBuilder() + .setTableSpec(options.getDeadLetterTableSpec()) + .setJsonSchema(ResourceUtil.getDeadletterTableSchemaJson()) + .build()); + } + break; + default: + log.error("Store type '{}' is not supported. No Feature Row will be written.", storeType); + break; + } + + return PDone.in(input.getPipeline()); + } +} diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/BaseFeatureDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/BaseFeatureDoFn.java deleted file mode 100644 index ec5d0395e0b..00000000000 --- a/ingestion/src/main/java/feast/ingestion/transform/fn/BaseFeatureDoFn.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.transform.fn; - -import com.google.common.base.Strings; -import org.apache.beam.sdk.transforms.DoFn; -import feast.ingestion.exceptions.ErrorsHandler; -import feast.ingestion.metrics.FeastMetrics; -import feast.ingestion.model.Errors; -import feast.ingestion.model.Specs; -import feast.types.FeatureRowExtendedProto.Error; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; - -public abstract class BaseFeatureDoFn extends DoFn { - - private String transformName; - - public BaseFeatureDoFn withTransformName(String transformName) { - this.transformName = transformName; - return this; - } - - @ProcessElement - public void baseProcessElement(ProcessContext context) { - try { - processElementImpl(context); - } catch (Throwable throwable) { - FeastMetrics.inc(context.element().getRow(), "error"); - String message = - Strings.isNullOrEmpty(throwable.getMessage()) - ? "Error during process element" - : throwable.getMessage(); - Error error = Errors.toError(transformName, message, throwable); - ErrorsHandler.handleError(context, context.element(), error); - } - } - - public abstract void processElementImpl(ProcessContext context); -} diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/ConvertTypesDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/ConvertTypesDoFn.java deleted file mode 100644 index c5ed560eeaf..00000000000 --- a/ingestion/src/main/java/feast/ingestion/transform/fn/ConvertTypesDoFn.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.transform.fn; - -import lombok.AllArgsConstructor; -import feast.ingestion.model.Specs; -import feast.ingestion.model.Values; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.types.FeatureProto.Feature; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import feast.types.FeatureRowProto.FeatureRow; - -@AllArgsConstructor -public class ConvertTypesDoFn extends BaseFeatureDoFn { - private Specs specs; - - @Override - public void processElementImpl(ProcessContext context) { - FeatureRowExtended rowExtended = context.element(); - FeatureRow row = rowExtended.getRow(); - FeatureRow.Builder rowBuilder = FeatureRow.newBuilder(); - rowBuilder - .setEntityName(row.getEntityName()) - .setEntityKey(row.getEntityKey()); - if (row.hasEventTimestamp()) { - rowBuilder.setEventTimestamp(row.getEventTimestamp()); - } - - for (Feature feature : row.getFeaturesList()) { - String featureId = feature.getId(); - FeatureSpec featureSpec = specs.getFeatureSpec(featureId); - - rowBuilder.addFeatures( - Feature.newBuilder() - .setId(featureId) - .setValue(Values.asType(feature.getValue(), featureSpec.getValueType()))); - } - context.output( - FeatureRowExtended.newBuilder() - .setRow(rowBuilder) - .setLastAttempt(rowExtended.getLastAttempt()) - .build()); - } -} diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/FilterFeatureRowDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/FilterFeatureRowDoFn.java deleted file mode 100644 index b5ec7629d09..00000000000 --- a/ingestion/src/main/java/feast/ingestion/transform/fn/FilterFeatureRowDoFn.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ -package feast.ingestion.transform.fn; - -import feast.types.FeatureProto.Feature; -import feast.types.FeatureRowProto.FeatureRow; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import org.apache.beam.sdk.transforms.DoFn; - -/** - * Filter FeatureRow to only contain feature with given IDs - */ -public class FilterFeatureRowDoFn extends DoFn { - private final Set featureIds; - - public FilterFeatureRowDoFn(List featureIds) { - this.featureIds = new HashSet<>(featureIds); - } - - @ProcessElement - public void processElement(ProcessContext context) { - FeatureRow input = context.element(); - FeatureRow.Builder output = FeatureRow.newBuilder(input).clearFeatures(); - for (Feature feature : input.getFeaturesList()) { - if (featureIds.contains(feature.getId())) { - output.addFeatures(feature); - } - } - context.output(output.build()); - } -} diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/Identity.java b/ingestion/src/main/java/feast/ingestion/transform/fn/Identity.java deleted file mode 100644 index 82ce55537ec..00000000000 --- a/ingestion/src/main/java/feast/ingestion/transform/fn/Identity.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.transform.fn; - -import lombok.AllArgsConstructor; -import org.apache.beam.sdk.transforms.DoFn; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; - -@AllArgsConstructor -public class Identity extends DoFn { - String name; - - @ProcessElement - public void processElement( - @Element FeatureRowExtended element, OutputReceiver out) { - out.output(element); - } -} diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/KafkaRecordToFeatureRowDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/KafkaRecordToFeatureRowDoFn.java new file mode 100644 index 00000000000..9b43a5ade82 --- /dev/null +++ b/ingestion/src/main/java/feast/ingestion/transform/fn/KafkaRecordToFeatureRowDoFn.java @@ -0,0 +1,74 @@ +package feast.ingestion.transform.fn; + +import com.google.auto.value.AutoValue; +import com.google.protobuf.InvalidProtocolBufferException; +import feast.ingestion.transform.ReadFromSource; +import feast.ingestion.transform.ReadFromSource.Builder; +import feast.ingestion.values.FailedElement; +import feast.ingestion.values.Field; +import feast.types.FeatureRowProto.FeatureRow; +import feast.types.FieldProto; +import feast.types.ValueProto.Value.ValCase; +import java.util.Base64; +import java.util.Map; +import org.apache.beam.sdk.io.kafka.KafkaRecord; +import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.values.TupleTag; +import org.apache.commons.lang3.exception.ExceptionUtils; + +@AutoValue +public abstract class KafkaRecordToFeatureRowDoFn extends + DoFn, FeatureRow> { + public abstract Map> getFeatureSetTagByKey(); + + public abstract TupleTag getFailureTag(); + + public static KafkaRecordToFeatureRowDoFn.Builder newBuilder() { + return new AutoValue_KafkaRecordToFeatureRowDoFn.Builder(); + } + + @AutoValue.Builder + public abstract static class Builder { + + public abstract Builder setFeatureSetTagByKey(Map> featureSetTagByKey); + + public abstract Builder setFailureTag(TupleTag failureTag); + + public abstract KafkaRecordToFeatureRowDoFn build(); + } + + @ProcessElement + public void processElement(ProcessContext context) { + byte[] value = context.element().getKV().getValue(); + FeatureRow featureRow; + + try { + featureRow = FeatureRow.parseFrom(value); + } catch (InvalidProtocolBufferException e) { + context.output( + getFailureTag(), + FailedElement.newBuilder() + .setTransformName("KafkaRecordToFeatureRow") + .setStackTrace(ExceptionUtils.getStackTrace(e)) + .setJobName(context.getPipelineOptions().getJobName()) + .setPayload(new String(Base64.getEncoder().encode(value))) + .setErrorMessage(e.getMessage()) + .build()); + return; + } + TupleTag tag = getFeatureSetTagByKey() + .getOrDefault(featureRow.getFeatureSet(), null); + if (tag == null) { + context.output( + getFailureTag(), + FailedElement.newBuilder() + .setTransformName("KafkaRecordToFeatureRow") + .setJobName(context.getPipelineOptions().getJobName()) + .setPayload(new String(Base64.getEncoder().encode(value))) + .setErrorMessage(String.format("Got row with unexpected feature set id %s. 
Expected one of %s.", featureRow.getFeatureSet(), getFeatureSetTagByKey().keySet())) + .build()); + return; + } + context.output(tag, featureRow); + } +} diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/LoggerDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/LoggerDoFn.java index d641c690a29..7b318fd1617 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/fn/LoggerDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/fn/LoggerDoFn.java @@ -20,13 +20,13 @@ import com.google.protobuf.Message; import com.google.protobuf.util.JsonFormat; import com.google.protobuf.util.JsonFormat.Printer; -import lombok.extern.slf4j.Slf4j; import org.apache.beam.sdk.transforms.DoFn; +import org.slf4j.Logger; import org.slf4j.event.Level; -@Slf4j public class LoggerDoFn extends DoFn { + private static final Logger log = org.slf4j.LoggerFactory.getLogger(LoggerDoFn.class); private Level level; private String prefix = ""; diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/SplitFeaturesDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/SplitFeaturesDoFn.java deleted file mode 100644 index 5c46a5dd5df..00000000000 --- a/ingestion/src/main/java/feast/ingestion/transform/fn/SplitFeaturesDoFn.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.transform.fn; - -import feast.ingestion.transform.SplitFeatures.SplitStrategy; -import feast.ingestion.model.Specs; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.types.FeatureProto.Feature; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import feast.types.FeatureRowProto.FeatureRow; -import java.util.HashMap; -import java.util.Map; -import java.util.Map.Entry; -import lombok.AllArgsConstructor; -import org.apache.beam.sdk.transforms.DoFn; -import org.apache.beam.sdk.values.TupleTag; - -@AllArgsConstructor -public class SplitFeaturesDoFn extends DoFn { - - private SplitStrategy splitStrategy; - private Specs specs; - - @ProcessElement - public void processElement(ProcessContext context) { - FeatureRowExtended rowExtended = context.element(); - FeatureRow row = rowExtended.getRow(); - Map, FeatureRow.Builder> taggedOutput = new HashMap<>(); - - for (Feature feature : row.getFeaturesList()) { - FeatureSpec featureSpec = specs.getFeatureSpec(feature.getId()); - TupleTag tag = splitStrategy.getTag(featureSpec); - FeatureRow.Builder builder = taggedOutput.get(tag); - - if (builder == null) { - builder = - FeatureRow.newBuilder() - .setEventTimestamp(row.getEventTimestamp()) - .setEntityName(row.getEntityName()) - .setEntityKey(row.getEntityKey()); - } - builder.addFeatures(feature); - taggedOutput.put(tag, builder); - } - - for (Entry, FeatureRow.Builder> entry : taggedOutput.entrySet()) { - FeatureRowExtended featureRowExtended = - FeatureRowExtended.newBuilder().mergeFrom(rowExtended).setRow(entry.getValue()).build(); - if (splitStrategy.isOutputTag(entry.getKey())) { - context.output(entry.getKey(), featureRowExtended); - } else { - context.output(featureRowExtended); - } - } - } -} diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowDoFn.java new file mode 100644 index 00000000000..2906e220b2c --- /dev/null +++ b/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowDoFn.java @@ -0,0 +1,97 @@ +package feast.ingestion.transform.fn; + +import com.google.auto.value.AutoValue; +import feast.ingestion.values.FailedElement; +import feast.ingestion.values.Field; +import feast.types.FeatureRowProto.FeatureRow; +import feast.types.FieldProto; +import feast.types.ValueProto.Value.ValCase; +import java.util.Map; +import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.values.TupleTag; + +@AutoValue +public abstract class ValidateFeatureRowDoFn extends DoFn { + + public abstract String getFeatureSetName(); + + public abstract int getFeatureSetVersion(); + + public abstract Map getFieldByName(); + + public abstract TupleTag getSuccessTag(); + + public abstract TupleTag getFailureTag(); + + public static Builder newBuilder() { + return new AutoValue_ValidateFeatureRowDoFn.Builder(); + } + + @AutoValue.Builder + public abstract static class Builder { + public abstract Builder setFeatureSetName(String featureSetName); + + public abstract Builder setFeatureSetVersion(int featureSetVersion); + + public abstract Builder setFieldByName(Map fieldByName); + + public abstract Builder setSuccessTag(TupleTag successTag); + + public abstract Builder setFailureTag(TupleTag failureTag); + + public abstract ValidateFeatureRowDoFn build(); + } + + + @ProcessElement + public void processElement(ProcessContext context) { + String error = null; + String featureSetId = String.format("%s:%d", 
getFeatureSetName(), getFeatureSetVersion()); + FeatureRow featureRow = context.element(); + if (featureRow.getFeatureSet().equals(featureSetId)) { + + for (FieldProto.Field field : featureRow.getFieldsList()) { + if (!getFieldByName().containsKey(field.getName())) { + error = + String.format( + "FeatureRow contains field '%s' which does not exist in FeatureSet '%s' version '%d'. Please check the FeatureRow data.", + field.getName(), getFeatureSetName(), getFeatureSetVersion()); + break; + } + // If value is set in the FeatureRow, make sure the value type matches + // that defined in FeatureSetSpec + if (!field.getValue().getValCase().equals(ValCase.VAL_NOT_SET)) { + int expectedTypeFieldNumber = + getFieldByName().get(field.getName()).getType().getNumber(); + int actualTypeFieldNumber = field.getValue().getValCase().getNumber(); + if (expectedTypeFieldNumber != actualTypeFieldNumber) { + error = + String.format( + "FeatureRow contains field '%s' with invalid type '%s'. Feast expects the field type to match that in FeatureSet '%s'. Please check the FeatureRow data.", + field.getName(), + field.getValue().getValCase(), + getFieldByName().get(field.getName()).getType()); + break; + } + } + } + } else { + error = String.format( + "FeatureRow contains invalid feature set id %s. Please check that the feature rows are being published to the correct topic on the feature stream.", + featureSetId); + } + + if (error != null) { + context.output( + getFailureTag(), + FailedElement.newBuilder() + .setTransformName("ValidateFeatureRow") + .setJobName(context.getPipelineOptions().getJobName()) + .setPayload(featureRow.toString()) + .setErrorMessage(error) + .build()); + } else { + context.output(getSuccessTag(), featureRow); + } + } +} diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowsDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowsDoFn.java deleted file mode 100644 index a8ad1d9469f..00000000000 --- a/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowsDoFn.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- * - */ - -package feast.ingestion.transform.fn; - -import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkNotNull; - -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import com.google.protobuf.util.Timestamps; -import feast.ingestion.exceptions.ValidationException; -import feast.ingestion.metrics.FeastMetrics; -import feast.ingestion.model.Specs; -import feast.ingestion.model.Values; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.ImportSpecProto.Field; -import feast.specs.ImportSpecProto.ImportSpec; -import feast.store.serving.FeatureServingFactory; -import feast.store.serving.FeatureServingFactoryService; -import feast.store.warehouse.FeatureWarehouseFactory; -import feast.store.warehouse.FeatureWarehouseFactoryService; -import feast.types.FeatureProto.Feature; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.ValueProto.ValueType; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -public class ValidateFeatureRowsDoFn extends BaseFeatureDoFn { - - private final List featureIds = new ArrayList<>(); - - private Set supportedServingTypes = new HashSet<>(); - private Set supportedWarehouseTypes = new HashSet<>(); - - private Specs specs; - - public ValidateFeatureRowsDoFn(Specs specs) { - this.specs = specs; - } - - @Setup - public void setup() { - ImportSpec importSpec = specs.getImportSpec(); - for (Field field : importSpec.getSchema().getFieldsList()) { - if (!Strings.isNullOrEmpty(field.getFeatureId())) { - featureIds.add(field.getFeatureId()); - } - } - for (FeatureServingFactory store : FeatureServingFactoryService.getAll()) { - supportedServingTypes.add(store.getType()); - } - for (FeatureWarehouseFactory store : FeatureWarehouseFactoryService.getAll()) { - supportedWarehouseTypes.add(store.getType()); - } - } - - @Override - public void processElementImpl(ProcessContext context) { - FeatureRow row = context.element().getRow(); - EntitySpec entitySpec = specs.getEntitySpec(row.getEntityName()); - Preconditions.checkNotNull(entitySpec, "Entity spec not found for " + row.getEntityName()); - - try { - checkArgument(!row.getEntityKey().isEmpty(), "Entity key must not be empty"); - checkArgument(!row.getEntityName().isEmpty(), "Entity name must not be empty"); - - checkArgument( - specs.getEntitySpecs().keySet().contains(row.getEntityName()), - String.format( - "Row entity not found in import spec entities. entity=%s", row.getEntityName())); - - checkArgument(row.hasEventTimestamp(), "Must have eventTimestamp set"); - Timestamps.checkValid(row.getEventTimestamp()); - - checkArgument(row.getFeaturesCount() > 0, "Must have at least one feature set"); - - for (Feature feature : row.getFeaturesList()) { - FeatureSpec featureSpec = specs.getFeatureSpec(feature.getId()); - checkNotNull( - featureSpec, String.format("Feature spec not found featureId=%s", feature.getId())); - - checkArgument( - featureSpec.getEntity().equals(row.getEntityName()), - String.format( - "Feature must have same entity as row. 
featureId=%s FeatureRow.entityName=%s FeatureSpec.entity=%s", - feature.getId(), row.getEntityName(), featureSpec.getEntity())); - - ValueType.Enum expectedType = featureSpec.getValueType(); - ValueType.Enum actualType = Values.toValueType(feature.getValue()); - checkArgument( - expectedType.equals(actualType), - String.format("Invalid value type, expected %s, actual %s", expectedType, actualType)); - - if (featureIds.size() > 0) { - checkArgument( - featureIds.contains(feature.getId()), - String.format( - "Unexpected feature that was not specified in import spec. featureId=%s", - feature.getId())); - } - } - FeastMetrics.inc(context.element().getRow(), "valid"); - context.output(context.element()); - } catch (IllegalArgumentException e) { - throw new ValidationException(e.getMessage(), e); - } - } -} diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WindowRecords.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WindowRecords.java new file mode 100644 index 00000000000..4796c83603c --- /dev/null +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WindowRecords.java @@ -0,0 +1,36 @@ +package feast.ingestion.transform.metrics; + +import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.transforms.GroupByKey; +import org.apache.beam.sdk.transforms.PTransform; +import org.apache.beam.sdk.transforms.ParDo; +import org.apache.beam.sdk.transforms.windowing.AfterProcessingTime; +import org.apache.beam.sdk.transforms.windowing.FixedWindows; +import org.apache.beam.sdk.transforms.windowing.Window; +import org.apache.beam.sdk.values.KV; +import org.apache.beam.sdk.values.PCollection; +import org.joda.time.Duration; + +public class WindowRecords extends + PTransform, PCollection>>> { + + private final long windowSize; + + public WindowRecords(long windowSize) { + this.windowSize = windowSize; + } + + @Override + public PCollection>> expand(PCollection input) { + return input + .apply("Window records", + Window.into(FixedWindows.of(Duration.standardSeconds(windowSize)))) + .apply("Add key", ParDo.of(new DoFn>() { + @ProcessElement + public void processElement(ProcessContext c) { + c.output(KV.of(1, c.element())); + } + })) + .apply("Collect", GroupByKey.create()); + } +} diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteDeadletterRowMetricsDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteDeadletterRowMetricsDoFn.java new file mode 100644 index 00000000000..a3c814158f8 --- /dev/null +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteDeadletterRowMetricsDoFn.java @@ -0,0 +1,83 @@ +package feast.ingestion.transform.metrics; + +import com.google.auto.value.AutoValue; +import com.timgroup.statsd.NonBlockingStatsDClient; +import com.timgroup.statsd.StatsDClient; +import com.timgroup.statsd.StatsDClientException; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.ingestion.values.FailedElement; +import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.values.KV; +import org.slf4j.Logger; + +@AutoValue +public abstract class WriteDeadletterRowMetricsDoFn extends + DoFn>, Void> { + + private static final Logger log = org.slf4j.LoggerFactory + .getLogger(WriteDeadletterRowMetricsDoFn.class); + + private final String INGESTION_JOB_NAME_KEY = "ingestion_job_name"; + private final String METRIC_PREFIX = "feast_ingestion"; + private final String STORE_TAG_KEY = "feast_store"; + private final String FEATURE_SET_NAME_TAG_KEY = "feast_featureSet_name"; + 
private final String FEATURE_SET_VERSION_TAG_KEY = "feast_featureSet_version"; + + public abstract String getStoreName(); + + public abstract FeatureSetSpec getFeatureSetSpec(); + + public abstract String getStatsdHost(); + + public abstract int getStatsdPort(); + + public StatsDClient statsd; + + public static WriteDeadletterRowMetricsDoFn.Builder newBuilder() { + return new AutoValue_WriteDeadletterRowMetricsDoFn.Builder(); + } + + @AutoValue.Builder + public abstract static class Builder { + + public abstract Builder setStoreName(String storeName); + + public abstract Builder setFeatureSetSpec(FeatureSetSpec featureSetSpec); + + public abstract Builder setStatsdHost(String statsdHost); + + public abstract Builder setStatsdPort(int statsdPort); + + public abstract WriteDeadletterRowMetricsDoFn build(); + + } + + @Setup + public void setup() { + statsd = new NonBlockingStatsDClient( + METRIC_PREFIX, + getStatsdHost(), + getStatsdPort() + ); + } + + @ProcessElement + public void processElement(ProcessContext c) { + FeatureSetSpec featureSetSpec = getFeatureSetSpec(); + + long rowCount = 0; + for (FailedElement ignored : c.element().getValue()) { + rowCount++; + } + + try { + statsd.count("deadletter_row_count", rowCount, + STORE_TAG_KEY + ":" + getStoreName(), + FEATURE_SET_NAME_TAG_KEY + ":" + featureSetSpec.getName(), + FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetSpec.getVersion(), + INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName()); + } catch (StatsDClientException e) { + log.warn("Unable to push metrics to server", e); + } + } +} diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java new file mode 100644 index 00000000000..b37947d936e --- /dev/null +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java @@ -0,0 +1,90 @@ +package feast.ingestion.transform.metrics; + +import com.google.auto.value.AutoValue; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.ingestion.options.ImportOptions; +import feast.ingestion.values.FailedElement; +import feast.types.FeatureRowProto.FeatureRow; +import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.transforms.PTransform; +import org.apache.beam.sdk.transforms.ParDo; +import org.apache.beam.sdk.values.PCollectionTuple; +import org.apache.beam.sdk.values.PDone; +import org.apache.beam.sdk.values.TupleTag; +import org.slf4j.Logger; + + +@AutoValue +public abstract class WriteMetricsTransform extends PTransform { + + private static final long WINDOW_SIZE_SECONDS = 15; + private static final Logger log = org.slf4j.LoggerFactory.getLogger(WriteMetricsTransform.class); + + public abstract String getStoreName(); + + public abstract FeatureSetSpec getFeatureSetSpec(); + + public abstract TupleTag getSuccessTag(); + + public abstract TupleTag getFailureTag(); + + public static Builder newBuilder() { + return new AutoValue_WriteMetricsTransform.Builder(); + } + + @AutoValue.Builder + public abstract static class Builder { + + public abstract Builder setStoreName(String storeName); + + public abstract Builder setFeatureSetSpec(FeatureSetSpec featureSetSpec); + + public abstract Builder setSuccessTag(TupleTag successTag); + + public abstract Builder setFailureTag(TupleTag failureTag); + + public abstract WriteMetricsTransform build(); + } + + @Override + public PDone expand(PCollectionTuple input) { + ImportOptions options = 
input.getPipeline().getOptions() + .as(ImportOptions.class); + switch (options.getMetricsExporterType()) { + case "statsd": + + input.get(getFailureTag()) + .apply("Window records", + new WindowRecords<>(WINDOW_SIZE_SECONDS)) + .apply("Write deadletter metrics", ParDo.of( + WriteDeadletterRowMetricsDoFn.newBuilder() + .setFeatureSetSpec(getFeatureSetSpec()) + .setStatsdHost(options.getStatsdHost()) + .setStatsdPort(options.getStatsdPort()) + .setStoreName(getStoreName()) + .build())); + + input.get(getSuccessTag()) + .apply("Window records", + new WindowRecords<>(WINDOW_SIZE_SECONDS)) + .apply("Write row metrics", ParDo + .of(WriteRowMetricsDoFn.newBuilder() + .setFeatureSetSpec(getFeatureSetSpec()) + .setStatsdHost(options.getStatsdHost()) + .setStatsdPort(options.getStatsdPort()) + .setStoreName(getStoreName()) + .build())); + + return PDone.in(input.getPipeline()); + case "none": + default: + input.get(getSuccessTag()).apply("Noop", + ParDo.of(new DoFn() { + @ProcessElement + public void processElement(ProcessContext c) { + } + })); + return PDone.in(input.getPipeline()); + } + } +} diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java new file mode 100644 index 00000000000..d3401291b32 --- /dev/null +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java @@ -0,0 +1,118 @@ +package feast.ingestion.transform.metrics; + +import com.google.auto.value.AutoValue; +import com.timgroup.statsd.NonBlockingStatsDClient; +import com.timgroup.statsd.StatsDClient; +import com.timgroup.statsd.StatsDClientException; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.types.FeatureRowProto.FeatureRow; +import feast.types.FieldProto.Field; +import feast.types.ValueProto.Value.ValCase; +import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.values.KV; +import org.slf4j.Logger; + +@AutoValue +public abstract class WriteRowMetricsDoFn extends DoFn>, Void> { + + private static final Logger log = org.slf4j.LoggerFactory.getLogger(WriteRowMetricsDoFn.class); + + private final String METRIC_PREFIX = "feast_ingestion"; + private final String STORE_TAG_KEY = "feast_store"; + private final String FEATURE_SET_NAME_TAG_KEY = "feast_featureSet_name"; + private final String FEATURE_SET_VERSION_TAG_KEY = "feast_featureSet_version"; + private final String FEATURE_TAG_KEY = "feast_feature_name"; + private final String INGESTION_JOB_NAME_KEY = "ingestion_job_name"; + + public abstract String getStoreName(); + + public abstract FeatureSetSpec getFeatureSetSpec(); + + public abstract String getStatsdHost(); + + public abstract int getStatsdPort(); + + public StatsDClient statsd; + + public static Builder newBuilder() { + return new AutoValue_WriteRowMetricsDoFn.Builder(); + } + + @AutoValue.Builder + public abstract static class Builder { + + public abstract Builder setStoreName(String storeName); + + public abstract Builder setFeatureSetSpec(FeatureSetSpec featureSetSpec); + + public abstract Builder setStatsdHost(String statsdHost); + + public abstract Builder setStatsdPort(int statsdPort); + + public abstract WriteRowMetricsDoFn build(); + } + + @Setup + public void setup() { + statsd = new NonBlockingStatsDClient( + METRIC_PREFIX, + getStatsdHost(), + getStatsdPort() + ); + } + + @ProcessElement + public void processElement(ProcessContext c) { + FeatureSetSpec featureSetSpec = getFeatureSetSpec(); + + long rowCount = 0; + long 
missingValueCount = 0; + + try { + for (FeatureRow row : c.element().getValue()) { + rowCount++; + long eventTimestamp = com.google.protobuf.util.Timestamps.toMillis(row.getEventTimestamp()); + + statsd.gauge("feature_row_lag_ms", System.currentTimeMillis() - eventTimestamp, + STORE_TAG_KEY + ":" + getStoreName(), + FEATURE_SET_NAME_TAG_KEY + ":" + featureSetSpec.getName(), + FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetSpec.getVersion(), + INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName()); + + statsd.gauge("feature_row_event_time_epoch_ms", eventTimestamp, + STORE_TAG_KEY + ":" + getStoreName(), + FEATURE_SET_NAME_TAG_KEY + ":" + featureSetSpec.getName(), + FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetSpec.getVersion(), + INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName()); + + for (Field field : row.getFieldsList()) { + if (!field.getValue().getValCase().equals(ValCase.VAL_NOT_SET)) { + statsd.gauge("feature_value_lag_ms", System.currentTimeMillis() - eventTimestamp, + STORE_TAG_KEY + ":" + getStoreName(), + FEATURE_SET_NAME_TAG_KEY + ":" + featureSetSpec.getName(), + FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetSpec.getVersion(), + FEATURE_TAG_KEY + ":" + field.getName(), + INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName()); + } else { + missingValueCount++; + } + } + } + + statsd.count("feature_row_ingested_count", rowCount, + STORE_TAG_KEY + ":" + getStoreName(), + FEATURE_SET_NAME_TAG_KEY + ":" + featureSetSpec.getName(), + FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetSpec.getVersion(), + INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName()); + + statsd.count("feature_row_missing_value_count", missingValueCount, + STORE_TAG_KEY + ":" + getStoreName(), + FEATURE_SET_NAME_TAG_KEY + ":" + featureSetSpec.getName(), + FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetSpec.getVersion(), + INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName()); + + } catch (StatsDClientException e) { + log.warn("Unable to push metrics to server", e); + } + } +} diff --git a/ingestion/src/main/java/feast/ingestion/util/ObjectMap.java b/ingestion/src/main/java/feast/ingestion/util/ObjectMap.java deleted file mode 100644 index 91193a25d3d..00000000000 --- a/ingestion/src/main/java/feast/ingestion/util/ObjectMap.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.util; - -import java.util.HashMap; - -public class ObjectMap extends HashMap { - - public ObjectMap tput(String key, Object val) { - this.put(key, val); - return this; - } -} \ No newline at end of file diff --git a/ingestion/src/main/java/feast/ingestion/util/PathUtil.java b/ingestion/src/main/java/feast/ingestion/util/PathUtil.java deleted file mode 100644 index 7f99006d7f6..00000000000 --- a/ingestion/src/main/java/feast/ingestion/util/PathUtil.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.ingestion.util; - -import java.net.URI; -import java.net.URISyntaxException; -import java.nio.file.Path; -import java.nio.file.Paths; - -public class PathUtil { - - /** - * Gets a path with a schema if present - */ - public static Path getPath(String value) { - if (value.contains("://")) { - try { - return Paths.get(new URI(value)); - } catch (URISyntaxException e) { - throw new IllegalArgumentException(e); - } - } else { - return Paths.get(value); - } - } -} diff --git a/ingestion/src/main/java/feast/ingestion/util/ProtoUtil.java b/ingestion/src/main/java/feast/ingestion/util/ProtoUtil.java deleted file mode 100644 index 9b71faff276..00000000000 --- a/ingestion/src/main/java/feast/ingestion/util/ProtoUtil.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.util; - -import com.fasterxml.jackson.core.JsonFactory; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; -import com.google.gson.Gson; -import com.google.protobuf.InvalidProtocolBufferException; -import com.google.protobuf.Message; -import com.google.protobuf.util.JsonFormat; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; - -public class ProtoUtil { - - private ProtoUtil() { - } - - public static T decodeProtoYamlFile(Path path, T prototype) - throws IOException { - String yaml = String.join("\n", Files.readAllLines(path)); - return decodeProtoYaml(yaml, prototype); - } - - public static T decodeProtoYaml(String yamlString, T prototype) - throws IOException { - ObjectMapper yamlMapper = new ObjectMapper(new YAMLFactory()); - ObjectMap map = yamlMapper.readerFor(ObjectMap.class).readValue(yamlString); - ObjectMapper jsonMapper = new ObjectMapper(new JsonFactory()); - String json = jsonMapper.writerFor(ObjectMap.class).writeValueAsString(map); - return decodeProtoJson(json, prototype); - } - - public static T decodeProtoJson(String jsonString, T prototype) - throws IOException { - T.Builder builder = prototype.newBuilderForType(); - JsonFormat.parser().merge(jsonString, builder); - //noinspection unchecked - return (T) builder.build(); - } - - public static String encodeProtoJson(T message) - throws InvalidProtocolBufferException { - return JsonFormat.printer().omittingInsignificantWhitespace().print(message); - } - - public static String encodeProtoYaml(T message) { - try { - String json = encodeProtoJson(message); - ObjectMap objectMap = new Gson().fromJson(json, ObjectMap.class); - ObjectMapper yamlMapper = new ObjectMapper(new YAMLFactory()); - return yamlMapper.writer().writeValueAsString(objectMap); - } catch (JsonProcessingException | InvalidProtocolBufferException e) { - throw new RuntimeException(e); - } - } -} diff --git a/ingestion/src/main/java/feast/ingestion/util/DateUtil.java b/ingestion/src/main/java/feast/ingestion/utils/DateUtil.java similarity index 99% rename from ingestion/src/main/java/feast/ingestion/util/DateUtil.java rename to ingestion/src/main/java/feast/ingestion/utils/DateUtil.java index ad779dbea33..07e15056d1a 100644 --- a/ingestion/src/main/java/feast/ingestion/util/DateUtil.java +++ b/ingestion/src/main/java/feast/ingestion/utils/DateUtil.java @@ -15,7 +15,7 @@ * */ -package feast.ingestion.util; +package feast.ingestion.utils; import com.google.protobuf.Timestamp; import java.time.Instant; diff --git a/ingestion/src/main/java/feast/ingestion/util/JsonUtil.java b/ingestion/src/main/java/feast/ingestion/utils/JsonUtil.java similarity index 97% rename from ingestion/src/main/java/feast/ingestion/util/JsonUtil.java rename to ingestion/src/main/java/feast/ingestion/utils/JsonUtil.java index d32a79d1c42..4f6c9d04380 100644 --- a/ingestion/src/main/java/feast/ingestion/util/JsonUtil.java +++ b/ingestion/src/main/java/feast/ingestion/utils/JsonUtil.java @@ -15,7 +15,7 @@ * */ -package feast.ingestion.util; +package feast.ingestion.utils; import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; diff --git a/ingestion/src/main/java/feast/ingestion/utils/ResourceUtil.java b/ingestion/src/main/java/feast/ingestion/utils/ResourceUtil.java new file mode 100644 index 00000000000..9d735d2e653 --- /dev/null +++ 
b/ingestion/src/main/java/feast/ingestion/utils/ResourceUtil.java @@ -0,0 +1,23 @@ +package feast.ingestion.utils; + +import com.google.common.io.Resources; +import java.nio.charset.StandardCharsets; +import org.slf4j.Logger; + +public class ResourceUtil { + private static final String DEADLETTER_SCHEMA_FILE_PATH = "schemas/deadletter_table_schema.json"; + private static final Logger log = org.slf4j.LoggerFactory.getLogger(ResourceUtil.class); + + public static String getDeadletterTableSchemaJson() { + String schemaJson = null; + try { + schemaJson = + Resources.toString( + Resources.getResource(DEADLETTER_SCHEMA_FILE_PATH), StandardCharsets.UTF_8); + } catch (Exception e) { + log.error( + "Unable to read {} file from the resources folder!", DEADLETTER_SCHEMA_FILE_PATH, e); + } + return schemaJson; + } +} diff --git a/ingestion/src/main/java/feast/ingestion/utils/SpecUtil.java b/ingestion/src/main/java/feast/ingestion/utils/SpecUtil.java new file mode 100644 index 00000000000..b847e3d4205 --- /dev/null +++ b/ingestion/src/main/java/feast/ingestion/utils/SpecUtil.java @@ -0,0 +1,97 @@ +package feast.ingestion.utils; + +import com.google.protobuf.InvalidProtocolBufferException; +import com.google.protobuf.util.JsonFormat; +import feast.core.FeatureSetProto.EntitySpec; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.FeatureSetProto.FeatureSpec; +import feast.core.StoreProto.Store; +import feast.core.StoreProto.Store.Subscription; +import feast.ingestion.values.Field; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.regex.Pattern; + +public class SpecUtil { + + /** + * Get only feature set specs that matches the subscription + */ + public static List getSubscribedFeatureSets( + List subscriptions, List featureSetSpecs) { + List subscribed = new ArrayList<>(); + for (FeatureSetSpec featureSet : featureSetSpecs) { + for (Subscription sub : subscriptions) { + // Convert wildcard to regex + String subName = sub.getName(); + if (!sub.getName().contains(".*")) { + subName = subName.replace("*", ".*"); + } + + // Match feature set name to pattern + Pattern pattern = Pattern.compile(subName); + if (!pattern.matcher(featureSet.getName()).matches()) { + continue; + } + + // If version is empty, match all + if (sub.getVersion().isEmpty()) { + subscribed.add(featureSet); + break; + } else if (sub.getVersion().startsWith(">") && sub.getVersion().length() > 1) { + // if version starts with >, match only those greater than the version number + int lowerBoundIncl = Integer.parseInt(sub.getVersion().substring(1)); + if (featureSet.getVersion() >= lowerBoundIncl) { + subscribed.add(featureSet); + break; + } + } else { + // If a specific version, match that version alone + int version = Integer.parseInt(sub.getVersion()); + if (featureSet.getVersion() == version) { + subscribed.add(featureSet); + break; + } + } + } + } + return subscribed; + } + + public static List parseFeatureSetSpecJsonList(List jsonList) + throws InvalidProtocolBufferException { + List featureSetSpecs = new ArrayList<>(); + for (String json : jsonList) { + FeatureSetSpec.Builder builder = FeatureSetSpec.newBuilder(); + JsonFormat.parser().merge(json, builder); + featureSetSpecs.add(builder.build()); + } + return featureSetSpecs; + } + + public static List parseStoreJsonList(List jsonList) + throws InvalidProtocolBufferException { + List stores = new ArrayList<>(); + for (String json : jsonList) { + Store.Builder builder = Store.newBuilder(); + 
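// Parse the JSON-encoded Store proto into the builder. +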
JsonFormat.parser().merge(json, builder); + stores.add(builder.build()); + } + return stores; + } + + public static Map getFieldByName(FeatureSetSpec featureSetSpec) { + Map fieldByName = new HashMap<>(); + for (EntitySpec entitySpec : featureSetSpec.getEntitiesList()) { + fieldByName.put( + entitySpec.getName(), new Field(entitySpec.getName(), entitySpec.getValueType())); + } + for (FeatureSpec featureSpec : featureSetSpec.getFeaturesList()) { + fieldByName.put( + featureSpec.getName(), new Field(featureSpec.getName(), featureSpec.getValueType())); + } + return fieldByName; + } +} diff --git a/ingestion/src/main/java/feast/ingestion/utils/StoreUtil.java b/ingestion/src/main/java/feast/ingestion/utils/StoreUtil.java new file mode 100644 index 00000000000..ace686d9e56 --- /dev/null +++ b/ingestion/src/main/java/feast/ingestion/utils/StoreUtil.java @@ -0,0 +1,225 @@ +package feast.ingestion.utils; + +import static feast.types.ValueProto.ValueType; + +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.DatasetInfo; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Builder; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.StandardTableDefinition; +import com.google.cloud.bigquery.Table; +import com.google.cloud.bigquery.TableDefinition; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.TableInfo; +import com.google.cloud.bigquery.TimePartitioning; +import com.google.cloud.bigquery.TimePartitioning.Type; +import com.google.common.collect.ImmutableMap; +import feast.core.FeatureSetProto.EntitySpec; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.FeatureSetProto.FeatureSpec; +import feast.core.StoreProto.Store; +import feast.core.StoreProto.Store.RedisConfig; +import feast.core.StoreProto.Store.StoreType; +import feast.types.ValueProto.ValueType.Enum; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.apache.commons.lang3.tuple.Pair; +import org.slf4j.Logger; +import redis.clients.jedis.JedisPool; +import redis.clients.jedis.exceptions.JedisConnectionException; + +// TODO: Create partitioned table by default + +/** + * This class is a utility to manage storage backends in Feast. + * + *
<p>
Examples when schemas need to be updated: + * + * <ul> + *   <li>when a new entity is registered, a table usually needs to be created + *   <li>when a new feature is registered, a column with appropriate data type usually needs to be + *       created + * </ul> + * + * <p>
If the storage backend is a key-value or a schema-less database, however, there may not be a + * need to manage any schemas. This class will not be used in that case. + */ +public class StoreUtil { + private static final Map VALUE_TYPE_TO_STANDARD_SQL_TYPE = + new HashMap<>(); + private static final Logger log = org.slf4j.LoggerFactory.getLogger(StoreUtil.class); + + // Refer to protos/feast/core/Store.proto for the mapping definition. + static { + VALUE_TYPE_TO_STANDARD_SQL_TYPE.put(Enum.BYTES, StandardSQLTypeName.BYTES); + VALUE_TYPE_TO_STANDARD_SQL_TYPE.put(Enum.STRING, StandardSQLTypeName.STRING); + VALUE_TYPE_TO_STANDARD_SQL_TYPE.put(ValueType.Enum.INT32, StandardSQLTypeName.INT64); + VALUE_TYPE_TO_STANDARD_SQL_TYPE.put(ValueType.Enum.INT64, StandardSQLTypeName.INT64); + VALUE_TYPE_TO_STANDARD_SQL_TYPE.put(ValueType.Enum.DOUBLE, StandardSQLTypeName.FLOAT64); + VALUE_TYPE_TO_STANDARD_SQL_TYPE.put(ValueType.Enum.FLOAT, StandardSQLTypeName.FLOAT64); + VALUE_TYPE_TO_STANDARD_SQL_TYPE.put(ValueType.Enum.BOOL, StandardSQLTypeName.BOOL); + VALUE_TYPE_TO_STANDARD_SQL_TYPE.put(Enum.BYTES_LIST, StandardSQLTypeName.BYTES); + VALUE_TYPE_TO_STANDARD_SQL_TYPE.put(Enum.STRING_LIST, StandardSQLTypeName.STRING); + VALUE_TYPE_TO_STANDARD_SQL_TYPE.put(Enum.INT32_LIST, StandardSQLTypeName.INT64); + VALUE_TYPE_TO_STANDARD_SQL_TYPE.put(Enum.INT64_LIST, StandardSQLTypeName.INT64); + VALUE_TYPE_TO_STANDARD_SQL_TYPE.put(Enum.DOUBLE_LIST, StandardSQLTypeName.FLOAT64); + VALUE_TYPE_TO_STANDARD_SQL_TYPE.put(Enum.FLOAT_LIST, StandardSQLTypeName.FLOAT64); + VALUE_TYPE_TO_STANDARD_SQL_TYPE.put(Enum.BOOL_LIST, StandardSQLTypeName.BOOL); + } + + public static void setupStore(Store store, FeatureSetSpec featureSetSpec) { + StoreType storeType = store.getType(); + switch (storeType) { + case REDIS: + StoreUtil.checkRedisConnection(store.getRedisConfig()); + break; + case BIGQUERY: + StoreUtil.setupBigQuery( + featureSetSpec, + store.getBigqueryConfig().getProjectId(), + store.getBigqueryConfig().getDatasetId(), + BigQueryOptions.getDefaultInstance().getService()); + break; + default: + log.warn("Store type '{}' is unsupported", storeType); + break; + } + } + + @SuppressWarnings("DuplicatedCode") + private static TableDefinition createBigQueryTableDefinition(FeatureSetSpec featureSetSpec) { + List fields = new ArrayList<>(); + log.info("Table will have the following fields:"); + + for (EntitySpec entitySpec : featureSetSpec.getEntitiesList()) { + Builder builder = + Field.newBuilder( + entitySpec.getName(), VALUE_TYPE_TO_STANDARD_SQL_TYPE.get(entitySpec.getValueType())); + if (entitySpec.getValueTypeValue() >= 7 && entitySpec.getValueTypeValue() <= 17) { + builder.setMode(Mode.REPEATED); + } + Field field = builder.build(); + log.info("- {}", field.toString()); + fields.add(field); + } + for (FeatureSpec featureSpec : featureSetSpec.getFeaturesList()) { + Builder builder = + Field.newBuilder( + featureSpec.getName(), + VALUE_TYPE_TO_STANDARD_SQL_TYPE.get(featureSpec.getValueType())); + if (featureSpec.getValueTypeValue() >= 7 && featureSpec.getValueTypeValue() <= 17) { + builder.setMode(Mode.REPEATED); + } + Field field = builder.build(); + log.info("- {}", field.toString()); + fields.add(field); + } + + // Refer to protos/feast/core/Store.proto for reserved fields in BigQuery. 
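+ // The reserved columns below (event_timestamp, created_timestamp, job_id) are appended to every feature set table.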
+ Map> + reservedFieldNameToPairOfStandardSQLTypeAndDescription = + ImmutableMap.of( + "event_timestamp", + Pair.of(StandardSQLTypeName.TIMESTAMP, "Event time for the FeatureRow"), + "created_timestamp", + Pair.of( + StandardSQLTypeName.TIMESTAMP, + "Processing time of the FeatureRow ingestion in Feast"), + "job_id", + Pair.of(StandardSQLTypeName.STRING, "Feast import job ID for the FeatureRow")); + for (Map.Entry> entry : + reservedFieldNameToPairOfStandardSQLTypeAndDescription.entrySet()) { + Field field = + Field.newBuilder(entry.getKey(), entry.getValue().getLeft()) + .setDescription(entry.getValue().getRight()) + .build(); + log.info("- {}", field.toString()); + fields.add(field); + } + + TimePartitioning timePartitioning = + TimePartitioning.newBuilder(Type.DAY).setField("event_timestamp").build(); + log.info("Table partitioning: " + timePartitioning.toString()); + + return StandardTableDefinition.newBuilder() + .setTimePartitioning(timePartitioning) + .setSchema(Schema.of(fields)) + .build(); + } + + /** + * This method ensures that, given a FeatureSetSpec object, the relevant BigQuery table is created + * with the correct schema. + * + *
<p>
Refer to protos/feast/core/Store.proto for the derivation of the table name and schema from + * a FeatureSetSpec object. + * + * @param featureSetSpec FeatureSetSpec object + * @param bigqueryProjectId BigQuery project id + * @param bigqueryDatasetId BigQuery dataset id + * @param bigquery BigQuery service object + */ + public static void setupBigQuery( + FeatureSetSpec featureSetSpec, + String bigqueryProjectId, + String bigqueryDatasetId, + BigQuery bigquery) { + + // Ensure BigQuery dataset exists. + DatasetId datasetId = DatasetId.of(bigqueryProjectId, bigqueryDatasetId); + if (bigquery.getDataset(datasetId) == null) { + log.info("Creating dataset '{}' in project '{}'", datasetId.getDataset(), bigqueryProjectId); + bigquery.create(DatasetInfo.of(datasetId)); + } + + String tableName = + String.format("%s_v%d", featureSetSpec.getName(), featureSetSpec.getVersion()) + .replaceAll("-", "_"); + TableId tableId = TableId.of(bigqueryProjectId, datasetId.getDataset(), tableName); + + // Return if there is an existing table + Table table = bigquery.getTable(tableId); + if (table != null) { + log.info( + "Writing to existing BigQuery table '{}:{}.{}'", + bigqueryProjectId, + datasetId.getDataset(), + tableName); + return; + } + + log.info( + "Creating table '{}' in dataset '{}' in project '{}'", + tableId.getTable(), + datasetId.getDataset(), + bigqueryProjectId); + TableDefinition tableDefinition = createBigQueryTableDefinition(featureSetSpec); + TableInfo tableInfo = TableInfo.of(tableId, tableDefinition); + bigquery.create(tableInfo); + } + + /** + * Ensure Redis is accessible, else throw a RuntimeException. + * + * @param redisConfig Plase refer to feast.core.Store proto + */ + public static void checkRedisConnection(RedisConfig redisConfig) { + JedisPool jedisPool = new JedisPool(redisConfig.getHost(), redisConfig.getPort()); + try { + jedisPool.getResource(); + } catch (JedisConnectionException e) { + throw new RuntimeException( + String.format( + "Failed to connect to Redis at host: '%s' port: '%d'. 
Please check that your Redis is running and accessible from Feast.", + redisConfig.getHost(), redisConfig.getPort())); + } + jedisPool.close(); + } +} diff --git a/ingestion/src/main/java/feast/ingestion/values/FailedElement.java b/ingestion/src/main/java/feast/ingestion/values/FailedElement.java new file mode 100644 index 00000000000..037f7f6296e --- /dev/null +++ b/ingestion/src/main/java/feast/ingestion/values/FailedElement.java @@ -0,0 +1,52 @@ +package feast.ingestion.values; + +import com.google.auto.value.AutoValue; +import javax.annotation.Nullable; +import org.apache.beam.sdk.schemas.AutoValueSchema; +import org.apache.beam.sdk.schemas.annotations.DefaultSchema; +import org.joda.time.Instant; + +@AutoValue +// Use DefaultSchema annotation so this AutoValue class can be serialized by Beam +// https://issues.apache.org/jira/browse/BEAM-1891 +// https://github.com/apache/beam/pull/7334 +@DefaultSchema(AutoValueSchema.class) +public abstract class FailedElement { + public abstract Instant getTimestamp(); + + @Nullable + public abstract String getJobName(); + + @Nullable + public abstract String getTransformName(); + + @Nullable + public abstract String getPayload(); + + @Nullable + public abstract String getErrorMessage(); + + @Nullable + public abstract String getStackTrace(); + + public static Builder newBuilder() { + return new AutoValue_FailedElement.Builder().setTimestamp(Instant.now()); + } + + @AutoValue.Builder + public abstract static class Builder { + public abstract Builder setTimestamp(Instant timestamp); + + public abstract Builder setJobName(String jobName); + + public abstract Builder setTransformName(String transformName); + + public abstract Builder setPayload(String payload); + + public abstract Builder setErrorMessage(String errorMessage); + + public abstract Builder setStackTrace(String stackTrace); + + public abstract FailedElement build(); + } +} diff --git a/ingestion/src/main/java/feast/ingestion/values/FailsafeFeatureRow.java b/ingestion/src/main/java/feast/ingestion/values/FailsafeFeatureRow.java new file mode 100644 index 00000000000..7ca3a88f104 --- /dev/null +++ b/ingestion/src/main/java/feast/ingestion/values/FailsafeFeatureRow.java @@ -0,0 +1,100 @@ +package feast.ingestion.values; + +import com.google.common.base.MoreObjects; +import feast.ingestion.coders.FailsafeFeatureRowCoder; +import java.util.Objects; +import org.apache.avro.reflect.Nullable; +import org.apache.beam.sdk.coders.DefaultCoder; + +/** + * Adapted from: + * https://github.com/GoogleCloudPlatform/DataflowTemplates/blob/834c833c65d214a28b1f47b493c8407990c3e717/src/main/java/com/google/cloud/teleport/values/FailsafeElement.java + * + *
<p>
The {@link FailsafeFeatureRow} class holds the current value and original value of a record + * within a pipeline. This class allows pipelines to not lose valuable information about an incoming + * record throughout the processing of that record. The use of this class allows for more robust + * dead-letter strategies as the original record information is not lost throughout the pipeline and + * can be output to a dead-letter in the event of a failure during one of the pipelines transforms. + */ +@DefaultCoder(FailsafeFeatureRowCoder.class) +public class FailsafeFeatureRow { + + private final OriginalT originalPayload; + private final CurrentT payload; + @Nullable private String errorMessage; + @Nullable private String stacktrace; + + private FailsafeFeatureRow(OriginalT originalPayload, CurrentT payload) { + this.originalPayload = originalPayload; + this.payload = payload; + } + + public static FailsafeFeatureRow of( + OriginalT originalPayload, CurrentT currentPayload) { + return new FailsafeFeatureRow<>(originalPayload, currentPayload); + } + + public static FailsafeFeatureRow of( + FailsafeFeatureRow other) { + return new FailsafeFeatureRow<>(other.originalPayload, other.payload) + .setErrorMessage(other.getErrorMessage()) + .setStacktrace(other.getStacktrace()); + } + + public OriginalT getOriginalPayload() { + return originalPayload; + } + + public CurrentT getPayload() { + return payload; + } + + public String getErrorMessage() { + return errorMessage; + } + + public FailsafeFeatureRow setErrorMessage(String errorMessage) { + this.errorMessage = errorMessage; + return this; + } + + public String getStacktrace() { + return stacktrace; + } + + public FailsafeFeatureRow setStacktrace(String stacktrace) { + this.stacktrace = stacktrace; + return this; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + final FailsafeFeatureRow other = (FailsafeFeatureRow) obj; + return Objects.deepEquals(this.originalPayload, other.getOriginalPayload()) + && Objects.deepEquals(this.payload, other.getPayload()) + && Objects.deepEquals(this.errorMessage, other.getErrorMessage()) + && Objects.deepEquals(this.stacktrace, other.getStacktrace()); + } + + @Override + public int hashCode() { + return Objects.hash(originalPayload, payload, errorMessage, stacktrace); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("originalPayload", originalPayload) + .add("payload", payload) + .add("errorMessage", errorMessage) + .add("stacktrace", stacktrace) + .toString(); + } +} diff --git a/ingestion/src/main/java/feast/ingestion/values/Field.java b/ingestion/src/main/java/feast/ingestion/values/Field.java new file mode 100644 index 00000000000..3550879ba3d --- /dev/null +++ b/ingestion/src/main/java/feast/ingestion/values/Field.java @@ -0,0 +1,30 @@ +package feast.ingestion.values; + +import feast.types.ValueProto.ValueType; +import java.io.Serializable; +import org.apache.beam.sdk.coders.AvroCoder; +import org.apache.beam.sdk.coders.DefaultCoder; + +/** + * Field class represents {@link feast.types.FieldProto.Field} but without value. + * + *
<p>
The use for this class is mainly for validating the Fields in FeatureRow. + */ +@DefaultCoder(AvroCoder.class) +public class Field implements Serializable { + private final String name; + private final ValueType.Enum type; + + public Field(String name, ValueType.Enum type) { + this.name = name; + this.type = type; + } + + public String getName() { + return name; + } + + public ValueType.Enum getType() { + return type; + } +} diff --git a/ingestion/src/main/java/feast/ingestion/values/PFeatureRows.java b/ingestion/src/main/java/feast/ingestion/values/PFeatureRows.java deleted file mode 100644 index 86a710ddae6..00000000000 --- a/ingestion/src/main/java/feast/ingestion/values/PFeatureRows.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ -package feast.ingestion.values; - -import feast.ingestion.transform.fn.BaseFeatureDoFn; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.atomic.AtomicInteger; -import lombok.AllArgsConstructor; -import lombok.Value; -import lombok.extern.slf4j.Slf4j; -import org.apache.beam.sdk.Pipeline; -import org.apache.beam.sdk.extensions.protobuf.ProtoCoder; -import org.apache.beam.sdk.transforms.Create; -import org.apache.beam.sdk.transforms.Flatten; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.transforms.ParDo.MultiOutput; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PCollectionList; -import org.apache.beam.sdk.values.PCollectionTuple; -import org.apache.beam.sdk.values.PInput; -import org.apache.beam.sdk.values.POutput; -import org.apache.beam.sdk.values.PValue; -import org.apache.beam.sdk.values.TupleTag; -import org.apache.beam.sdk.values.TupleTagList; - -@AllArgsConstructor -@Value -@Slf4j -public class PFeatureRows implements PInput, POutput { - - public static final TupleTag MAIN_TAG = new TupleTag<>(); - public static final TupleTag ERRORS_TAG = new TupleTag<>(); - private static final AtomicInteger counter = new AtomicInteger(); - private PCollection main; - private PCollection errors; - - public static PFeatureRows of(PCollection input) { - Pipeline pipeline = input.getPipeline(); - Create.Values empty = Create.empty(ProtoCoder.of(FeatureRowExtended.class)); - return new PFeatureRows(input, - pipeline.apply(input.getName() + "/empty.errors" + counter.incrementAndGet(), empty)); - } - - public static PFeatureRows of( - PCollection input, PCollection errors) { - return new PFeatureRows(input, errors); - } - - @Override - public Pipeline getPipeline() { - return main.getPipeline(); - } - - @Override - public Map, PValue> expand() { - Map, PValue> expanded = new HashMap<>(); - expanded.put(MAIN_TAG, main); - expanded.put(ERRORS_TAG, errors); - return expanded; - } - - @Override - public void finishSpecifyingOutput( - String transformName, PInput input, 
PTransform transform) { - } - - /** - * @return new PFeatureRows, which has any tagged errors and retries in DoFn added to the errors - * and retries gathered so far. - */ - public PFeatureRows applyDoFn(String name, BaseFeatureDoFn doFn) { - MultiOutput transform = - ParDo.of(doFn.withTransformName(name)) - .withOutputTags(MAIN_TAG, TupleTagList.of(ERRORS_TAG)); - - PCollectionTuple transformed = Pipeline.applyTransform(name, main, transform); - - PCollection outMain = - transformed.get(MAIN_TAG).setCoder(ProtoCoder.of(FeatureRowExtended.class)); - - PCollection outErrors = - PCollectionList.of( - transformed.get(ERRORS_TAG).setCoder(ProtoCoder.of(FeatureRowExtended.class))) - .and(errors) - .apply(name + "/Flatten errors", Flatten.pCollections()) - .setCoder(ProtoCoder.of(FeatureRowExtended.class)); - return new PFeatureRows(outMain, outErrors); - } - - public PFeatureRows apply(String name, PTransform transform) { - return Pipeline.applyTransform(name, this, transform); - } -} diff --git a/ingestion/src/main/java/feast/options/Options.java b/ingestion/src/main/java/feast/options/Options.java deleted file mode 100644 index 47077810c30..00000000000 --- a/ingestion/src/main/java/feast/options/Options.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.options; - -import java.io.Serializable; - -/** - * interface for identifying classes that can use the OptionsParser for extra type safety - */ -public interface Options extends Serializable { - -} diff --git a/ingestion/src/main/java/feast/options/OptionsParser.java b/ingestion/src/main/java/feast/options/OptionsParser.java deleted file mode 100644 index 8400f5d423d..00000000000 --- a/ingestion/src/main/java/feast/options/OptionsParser.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.options; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.module.jsonSchema.JsonSchema; -import com.fasterxml.jackson.module.jsonSchema.JsonSchemaGenerator; -import com.google.common.collect.Lists; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Set; -import javax.validation.ConstraintViolation; -import javax.validation.Validation; -import javax.validation.Validator; -import javax.validation.ValidatorFactory; - -public class OptionsParser { - - private static final ObjectMapper strictMapper = new ObjectMapper(); - private static final ObjectMapper lenientMapper = new ObjectMapper() - .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - private static final Validator validator; - - static { - try (ValidatorFactory validatorFactory = Validation.buildDefaultValidatorFactory()) { - validator = validatorFactory.getValidator(); - } - } - - /** - * Return a json schema string representing an options class for error messages - */ - static String getJsonSchema(Class optionsClass) { - JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(strictMapper); - JsonSchema schema = null; - try { - schema = schemaGen.generateSchema(optionsClass); - schema.setId(null); // clear the ID as it's visual noise - return strictMapper.writer().forType(JsonSchema.class).writeValueAsString(schema); - } catch (IOException e) { - return ""; - } - } - - - private static T parse(Map optionsMap, Class clazz, - boolean lenient) { - ObjectMapper mapper = lenient ? lenientMapper : strictMapper; - List messages = Lists.newArrayList(); - T options; - try { - options = mapper.convertValue(optionsMap, clazz); - } catch (IllegalArgumentException e) { - messages.add("Expecting options convertible to schema: " + getJsonSchema(clazz)); - try { - messages.add("Got " + mapper.writer().writeValueAsString(optionsMap)); - } catch (JsonProcessingException ee) { - // - } - throw new IllegalArgumentException(String.join(", ", messages), e); - } - Set> violations = validator.validate(options); - if (violations.size() > 0) { - messages.add("Expecting options convertible to schema: " + getJsonSchema(clazz)); - for (ConstraintViolation violation : violations) { - messages.add( - String.format( - "property \"%s\" %s", violation.getPropertyPath(), violation.getMessage())); - } - throw new IllegalArgumentException(String.join(", ", messages)); - } - return options; - } - - /** - * Construct a class from string options and validate with any javax validation annotations, - * unknown options are ignored - */ - public static T lenientParse(Map optionsMap, Class clazz) { - return parse(optionsMap, clazz, true); - } - - - /** - * Construct a class from string options and validate with any javax validation annotations - */ - public static T parse(Map optionsMap, Class clazz) { - return parse(optionsMap, clazz, false); - } -} diff --git a/ingestion/src/main/java/feast/options/Validation.java b/ingestion/src/main/java/feast/options/Validation.java deleted file mode 100644 index 892c9f36fa6..00000000000 --- a/ingestion/src/main/java/feast/options/Validation.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.options; - -import static java.lang.annotation.ElementType.ANNOTATION_TYPE; -import static java.lang.annotation.ElementType.CONSTRUCTOR; -import static java.lang.annotation.ElementType.FIELD; -import static java.lang.annotation.ElementType.METHOD; -import static java.lang.annotation.ElementType.PARAMETER; -import static java.lang.annotation.ElementType.TYPE_USE; -import static java.lang.annotation.RetentionPolicy.RUNTIME; - -import java.lang.annotation.Documented; -import java.lang.annotation.Retention; -import java.lang.annotation.Target; -import javax.validation.Constraint; -import javax.validation.ConstraintValidator; -import javax.validation.ConstraintValidatorContext; -import javax.validation.Payload; -import javax.validation.ReportAsSingleViolation; -import org.joda.time.format.ISOPeriodFormat; - -public @interface Validation { - @Documented - @ReportAsSingleViolation - @Target({METHOD, FIELD, ANNOTATION_TYPE, CONSTRUCTOR, PARAMETER, TYPE_USE}) - @Constraint(validatedBy = ISO8601Duration.ISO8601DurationValidator.class) - @Retention(RUNTIME) - @interface ISO8601Duration { - - String message() default "must match duration format for ISO 8601 standard"; - - Class[] groups() default {}; - - Class[] payload() default {}; - - @Target({METHOD, FIELD, ANNOTATION_TYPE, CONSTRUCTOR, PARAMETER, TYPE_USE}) - @Retention(RUNTIME) - @Documented - @interface List { - ISO8601Duration[] value(); - } - - class ISO8601DurationValidator implements ConstraintValidator { - - @Override - public boolean isValid(String value, ConstraintValidatorContext context) { - if (value == null) { - return true; - } - try { - ISOPeriodFormat.standard().parsePeriod(value); - return true; - } catch (Throwable e) { - return false; - } - } - } - } -} diff --git a/ingestion/src/main/java/feast/source/FeatureSource.java b/ingestion/src/main/java/feast/source/FeatureSource.java deleted file mode 100644 index e4c0590b32b..00000000000 --- a/ingestion/src/main/java/feast/source/FeatureSource.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.source; - -import feast.types.FeatureRowProto.FeatureRow; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PInput; - -public abstract class FeatureSource extends PTransform> { - -} diff --git a/ingestion/src/main/java/feast/source/FeatureSourceFactory.java b/ingestion/src/main/java/feast/source/FeatureSourceFactory.java deleted file mode 100644 index fc4d64418cd..00000000000 --- a/ingestion/src/main/java/feast/source/FeatureSourceFactory.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.source; - -import feast.specs.ImportSpecProto.ImportSpec; - -/** - * A FeatureSourceFactory creates FeatureSource instances, which can read FeatureRow messages from a - * source location. - */ -public interface FeatureSourceFactory { - - String getType(); - - FeatureSource create(ImportSpec importSpec); -} diff --git a/ingestion/src/main/java/feast/source/FeatureSourceFactoryService.java b/ingestion/src/main/java/feast/source/FeatureSourceFactoryService.java deleted file mode 100644 index 18917cb1dac..00000000000 --- a/ingestion/src/main/java/feast/source/FeatureSourceFactoryService.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.source; - -import avro.shaded.com.google.common.collect.Lists; -import com.google.common.collect.Iterators; -import java.util.ArrayList; -import java.util.List; -import java.util.ServiceLoader; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -public class FeatureSourceFactoryService { - - private static ServiceLoader serviceLoader = ServiceLoader - .load(FeatureSourceFactory.class); - private static List manuallyRegistered = new ArrayList<>(); - - static { - for (FeatureSourceFactory source : getAll()) { - log.info("FeatureSourceFactory type found: " + source.getType()); - } - } - - public static List getAll() { - return Lists.newArrayList( - Iterators.concat(manuallyRegistered.iterator(), serviceLoader.iterator())); - } - - /** - * Get store of the given subclass. 
- */ - public static T get(Class clazz) { - for (FeatureSourceFactory store : getAll()) { - if (clazz.isInstance(store)) { - //noinspection unchecked - return (T) store; - } - } - return null; - } - - public static void register(FeatureSourceFactory store) { - manuallyRegistered.add(store); - } -} diff --git a/ingestion/src/main/java/feast/source/bigquery/BigQueryFeatureSource.java b/ingestion/src/main/java/feast/source/bigquery/BigQueryFeatureSource.java deleted file mode 100644 index b183ffbddc0..00000000000 --- a/ingestion/src/main/java/feast/source/bigquery/BigQueryFeatureSource.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.source.bigquery; - -import static com.google.common.base.Preconditions.checkArgument; - -import com.google.auto.service.AutoService; -import com.google.common.base.Preconditions; -import feast.options.Options; -import feast.options.OptionsParser; -import feast.source.FeatureSource; -import feast.source.FeatureSourceFactory; -import feast.specs.ImportSpecProto.ImportSpec; -import feast.types.FeatureRowProto.FeatureRow; -import java.util.List; -import javax.validation.constraints.NotEmpty; -import lombok.AllArgsConstructor; -import lombok.NonNull; -import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PInput; - - -/** - * Transform for processing BigQuery tables and producing FeatureRow messages. - * - *
<p>
This transform asserts that the import spec is for only one entity, as all columns must have - * the same entity. The columns names in the import spec will be used for selecting columns from the - * BigQuery table, but it still scans the whole row. - * - *
<p>
The output is a PCollection of {@link feast.types.FeatureRowProto.FeatureRow FeatureRows}, - * where each feature and the entity key {@link feast.types.ValueProto.Value Value} in the - * FeatureRow is taken from a column in the BigQuery table and set with the closest type to the - * BigQuery column schema that is available in {@link feast.types.ValueProto.ValueType ValueType}. - * - *
<p>
Note a small gotcha is that because Integers and Numerics in BigQuery are 64 bits, these are - * always cast to INT64 and DOUBLE respectively. - * - *
<p>
Downstream these will fail validation if the corresponding {@link - * feast.specs.FeatureSpecProto.FeatureSpec FeatureSpec} has a 32 bit type. - */ -@AllArgsConstructor -public class BigQueryFeatureSource extends FeatureSource { - - private static final String BIGQUERY_FEATURE_SOURCE_TYPE = "bigquery"; - - @NonNull - private final ImportSpec importSpec; - - @Override - public PCollection expand(PInput input) { - BigQuerySourceOptions options = OptionsParser - .parse(importSpec.getSourceOptionsMap(), BigQuerySourceOptions.class); - - List entities = importSpec.getEntitiesList(); - Preconditions.checkArgument( - entities.size() == 1, "exactly 1 entity must be set for BigQuery import"); - - String url = String.format("%s:%s.%s", options.project, options.dataset, options.table); - - return input - .getPipeline() - .apply( - BigQueryIO.read(new BigQueryToFeatureRowFn(importSpec)).from(url)); - } - - - public static class BigQuerySourceOptions implements Options { - - @NotEmpty - public String project; - @NotEmpty - public String dataset; - @NotEmpty - public String table; - } - - - @AutoService(FeatureSourceFactory.class) - public static class Factory implements FeatureSourceFactory { - - @Override - public String getType() { - return BIGQUERY_FEATURE_SOURCE_TYPE; - } - - @Override - public FeatureSource create(ImportSpec importSpec) { - checkArgument(importSpec.getType().equals(getType())); - return new BigQueryFeatureSource(importSpec); - } - } -} diff --git a/ingestion/src/main/java/feast/source/bigquery/BigQueryToFeatureRowFn.java b/ingestion/src/main/java/feast/source/bigquery/BigQueryToFeatureRowFn.java deleted file mode 100644 index dfe26c45b77..00000000000 --- a/ingestion/src/main/java/feast/source/bigquery/BigQueryToFeatureRowFn.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.source.bigquery; - -import com.google.api.services.bigquery.model.TableFieldSchema; -import com.google.api.services.bigquery.model.TableSchema; -import com.google.cloud.bigquery.LegacySQLTypeName; -import com.google.cloud.bigquery.StandardSQLTypeName; -import com.google.common.collect.Maps; -import com.google.protobuf.Timestamp; -import feast.store.warehouse.bigquery.ValueBigQueryBuilder; -import java.util.Map; -import org.apache.avro.generic.GenericRecord; -import org.apache.beam.sdk.io.gcp.bigquery.SchemaAndRecord; -import org.apache.beam.sdk.transforms.SerializableFunction; -import feast.ingestion.model.Values; -import feast.specs.ImportSpecProto.Field; -import feast.specs.ImportSpecProto.ImportSpec; -import feast.types.FeatureProto.Feature; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.ValueProto.Value; - -/** - * This is a serializable function used with the BigQueryIO for fetching feature rows directly from - * BigQuery - */ -public class BigQueryToFeatureRowFn - implements SerializableFunction { - - private final ImportSpec importSpec; - private final Map fields; - - public BigQueryToFeatureRowFn(ImportSpec importSpec) { - this.importSpec = importSpec; - fields = Maps.newHashMap(); - for (Field field : importSpec.getSchema().getFieldsList()) { - fields.put(field.getName().isEmpty() ? field.getFeatureId() : field.getName(), field); - } - } - - @Override - public FeatureRow apply(SchemaAndRecord input) { - GenericRecord record = input.getRecord(); - TableSchema schema = input.getTableSchema(); - - FeatureRow.Builder builder = FeatureRow.newBuilder(); - builder.setEntityName(importSpec.getEntities(0)); - - String entityKeyColumn = importSpec.getSchema().getEntityIdColumn(); - String timestampColumn = importSpec.getSchema().getTimestampColumn(); - - for (TableFieldSchema tableFieldSchema : schema.getFields()) { - String name = tableFieldSchema.getName(); - String bigQueryType = tableFieldSchema.getType(); - - if (!fields.containsKey(name)) { - continue; - } - Field field = fields.get(name); - - Value value; - StandardSQLTypeName bqType = LegacySQLTypeName.valueOfStrict(bigQueryType).getStandardType(); - value = ValueBigQueryBuilder.valueOf(record.get(name), bqType); - - if (name.equals(entityKeyColumn)) { - builder.setEntityKey(Values.asString(value).getStringVal()); - } else if (name.equals(timestampColumn)) { - builder.setEventTimestamp(value.getTimestampVal()); - } else if (!field.getFeatureId().isEmpty()) { - builder.addFeatures(Feature.newBuilder().setId(field.getFeatureId()).setValue(value)); - } - } - if (!importSpec.getSchema().getTimestampValue().equals(Timestamp.getDefaultInstance())) { - builder.setEventTimestamp(importSpec.getSchema().getTimestampValue()); - } - return builder.build(); - } -} diff --git a/ingestion/src/main/java/feast/source/common/ValueMapToFeatureRowTransform.java b/ingestion/src/main/java/feast/source/common/ValueMapToFeatureRowTransform.java deleted file mode 100644 index 490eb62bd61..00000000000 --- a/ingestion/src/main/java/feast/source/common/ValueMapToFeatureRowTransform.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.source.common; - -import com.google.common.base.Strings; -import com.google.common.collect.Maps; -import feast.ingestion.metrics.FeastMetrics; -import feast.ingestion.model.Values; -import feast.specs.ImportSpecProto.Field; -import feast.specs.ImportSpecProto.Schema; -import feast.types.FeatureProto.Feature; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.ValueProto.Value; -import feast.types.ValueProto.Value.ValCase; -import java.util.Map; -import java.util.Map.Entry; -import org.apache.beam.sdk.extensions.protobuf.ProtoCoder; -import org.apache.beam.sdk.transforms.DoFn; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.values.PCollection; - -public class ValueMapToFeatureRowTransform extends - PTransform>, PCollection> { - - private String entity; - private Schema schema; - - public ValueMapToFeatureRowTransform(String entity, Schema schema) { - this.entity = entity; - this.schema = schema; - } - - @Override - public PCollection expand(PCollection> input) { - return input.apply(ParDo.of(ValueMapToFeatureRowDoFn.of(entity, schema))) - .setCoder(ProtoCoder.of(FeatureRow.class)); - } - - public static class ValueMapToFeatureRowDoFn extends - DoFn, FeatureRow> { - - private String entityIdColumn; - private String timestampColumn; - private com.google.protobuf.Timestamp timestampValue; - private String entity; - private Map fields; - - private ValueMapToFeatureRowDoFn(String entity, Map fields, - String entityIdColumn, - String timestampColumn, - com.google.protobuf.Timestamp timestampValue) { - this.entity = entity; - this.fields = fields; - this.entityIdColumn = entityIdColumn; - this.timestampColumn = timestampColumn; - this.timestampValue = timestampValue; - } - - public static ValueMapToFeatureRowDoFn of(String entity, Schema schema) { - final Map fields = Maps.newHashMap(); - for (Field field : schema.getFieldsList()) { - String displayName = !field.getName().isEmpty() ? 
field.getName() : field.getFeatureId(); - fields.put(displayName, field); - } - return new ValueMapToFeatureRowDoFn(entity, fields, schema.getEntityIdColumn(), - schema.getTimestampColumn(), schema.getTimestampValue()); - } - - @ProcessElement - public void processElement(ProcessContext context) { - FeatureRow.Builder builder = FeatureRow.newBuilder(); - try { - Map valueMap = context.element(); - builder.setEntityName(entity); - for (Entry entry : valueMap.entrySet()) { - String name = entry.getKey(); - Value value = entry.getValue(); - Field field = fields.get(name); - - if (value == null || value.getValCase().equals(ValCase.VAL_NOT_SET)) { - continue; - } - - // A feature can only be one of these things - if (entityIdColumn.equals(name)) { - builder.setEntityKey(Values.asString(value).getStringVal()); - } else if (timestampColumn.equals(name)) { - builder.setEventTimestamp(Values.asTimestamp(value).getTimestampVal()); - } else if (!Strings.isNullOrEmpty(field.getFeatureId())) { - String featureId = field.getFeatureId(); - builder.addFeatures( - Feature.newBuilder().setId(featureId).setValue(value)); - } - // else silently ignore this column - } - if (!timestampValue.equals(com.google.protobuf.Timestamp.getDefaultInstance())) { - // This overrides any column event timestamp. - builder.setEventTimestamp(timestampValue); - } - context.output(builder.build()); - } catch (Exception e) { - FeastMetrics.inc(builder.build(), "input_errors"); - } - } - } -} \ No newline at end of file diff --git a/ingestion/src/main/java/feast/source/csv/CsvFileFeatureSource.java b/ingestion/src/main/java/feast/source/csv/CsvFileFeatureSource.java deleted file mode 100644 index 0d713134f39..00000000000 --- a/ingestion/src/main/java/feast/source/csv/CsvFileFeatureSource.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.source.csv; - -import static com.google.common.base.Preconditions.checkArgument; - -import com.google.auto.service.AutoService; -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import feast.options.Options; -import feast.options.OptionsParser; -import feast.source.FeatureSource; -import feast.source.FeatureSourceFactory; -import feast.source.common.ValueMapToFeatureRowTransform.ValueMapToFeatureRowDoFn; -import feast.specs.ImportSpecProto.Field; -import feast.specs.ImportSpecProto.ImportSpec; -import feast.specs.ImportSpecProto.Schema; -import feast.types.FeatureRowProto.FeatureRow; -import java.util.List; -import java.util.Map; -import javax.validation.constraints.NotEmpty; -import lombok.AllArgsConstructor; -import org.apache.beam.sdk.io.TextIO; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PInput; - - -/** - * Transform for processing CSV text jsonfiles and producing FeatureRow messages. 
CSV jsonfiles with - * headers are not supported. - * - *

This transform asserts that the import spec is for only one entity, as all columns must have - * the same entity. There must be the same number of columns in the CSV jsonfiles as in the import - * spec. - * - *

The output is a PCollection of {@link feast.types.FeatureRowProto.FeatureRow FeatureRows}, - * where every feature and entity {@link feast.types.ValueProto.Value Value} in the FeatureRow is a - * string (set via Value.stringVal). - */ -@AllArgsConstructor -public class CsvFileFeatureSource extends FeatureSource { - - public static final String CSV_FILE_FEATURE_SOURCE_TYPE = "file.csv"; - - private final ImportSpec importSpec; - - @Override - public PCollection expand(PInput input) { - CsvFileFeatureSourceOptions options = OptionsParser - .parse(importSpec.getSourceOptionsMap(), CsvFileFeatureSourceOptions.class); - List entities = importSpec.getEntitiesList(); - Preconditions.checkArgument( - entities.size() == 1, "exactly 1 entity must be set for CSV import"); - Schema schema = importSpec.getSchema(); - String entity = entities.get(0); - - final List fieldNames = Lists.newArrayList(); - final Map fields = Maps.newHashMap(); - for (Field field : schema.getFieldsList()) { - String displayName = !field.getName().isEmpty() ? field.getName() : field.getFeatureId(); - fieldNames.add(displayName); - fields.put(displayName, field); - } - - String path = options.path; - String entityIdColumn = schema.getEntityIdColumn(); - Preconditions.checkArgument( - !Strings.isNullOrEmpty(entityIdColumn), "entity id column must be set"); - String timestampColumn = schema.getTimestampColumn(); - Preconditions.checkArgument( - schema.getFieldsList().size() > 0, - "CSV import needs schema with a least one field specified"); - - if (!Strings.isNullOrEmpty(timestampColumn)) { - Preconditions.checkArgument(fieldNames.contains(timestampColumn), - String.format("timestampColumn %s, does not match any field", timestampColumn)); - } - PCollection text = input.getPipeline().apply(TextIO.read().from(path)); - return text.apply(ParseCsvTransform.builder().header(fieldNames).build()) - .apply(new StringToValueMapTransform()) - .apply(ParDo.of(ValueMapToFeatureRowDoFn.of(entity, schema))); - } - - public static class CsvFileFeatureSourceOptions implements Options { - - @NotEmpty - public String path; - } - - @AutoService(FeatureSourceFactory.class) - public static class Factory implements FeatureSourceFactory { - - @Override - public String getType() { - return CSV_FILE_FEATURE_SOURCE_TYPE; - } - - @Override - public FeatureSource create(ImportSpec importSpec) { - checkArgument(importSpec.getType().equals(getType())); - return new CsvFileFeatureSource(importSpec); - } - } -} diff --git a/ingestion/src/main/java/feast/source/csv/ParseCsvTransform.java b/ingestion/src/main/java/feast/source/csv/ParseCsvTransform.java deleted file mode 100644 index 95bcdfc5081..00000000000 --- a/ingestion/src/main/java/feast/source/csv/ParseCsvTransform.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.source.csv; - -import java.io.IOException; -import java.io.Serializable; -import java.io.StringReader; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import lombok.Builder; -import lombok.NonNull; -import org.apache.beam.sdk.coders.MapCoder; -import org.apache.beam.sdk.coders.StringUtf8Coder; -import org.apache.beam.sdk.transforms.DoFn; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.values.PCollection; -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVParser; - -/** - * Transform for reading text CSV jsonfiles into a map of strings to strings. CSV format is assumed - * to be RFC4180. - */ -@Builder -public class ParseCsvTransform extends - PTransform, PCollection>> { - - @NonNull - private List header; - - @Override - public PCollection> expand(PCollection input) { - CSVLineParser csvLineParser = new CSVLineParser(header); - return input.apply(ParDo.of(new DoFn>() { - @ProcessElement - public void processElement(ProcessContext context) { - String line = context.element(); - if (line != null && !line.isEmpty()) { - for (StringMap map : csvLineParser.records(line)) { - context.output(map); - } - } - } - })).setCoder(MapCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of())); - } - - public static class StringMap extends HashMap { - - public StringMap() { - super(); - } - - public StringMap(Map map) { - super(); - this.putAll(map); - } - - public StringMap thisput(String key, String value) { - this.put(key, value); - return this; - } - } - - /** - * This is a helper class to make it easy to use the commons csv parser object for one line - * without recreating it. - */ - public static class CSVLineParser implements Serializable { - - private List header; - - public CSVLineParser(List header) { - this.header = header; - } - - public List records(String input) { - try { - CSVParser parser = CSVFormat.RFC4180 - .withHeader(header.toArray(new String[]{})) - .parse(new StringReader(input)); - return parser.getRecords().stream() - .map(record -> new StringMap(record.toMap())) - .collect(Collectors.toList()); - } catch (IOException e) { - String message = String.format("OOPS I couldn't parse the csv line! '%s'", input); - throw new RuntimeException(message); - } - } - } -} diff --git a/ingestion/src/main/java/feast/source/csv/StringToValueMapTransform.java b/ingestion/src/main/java/feast/source/csv/StringToValueMapTransform.java deleted file mode 100644 index c583a077710..00000000000 --- a/ingestion/src/main/java/feast/source/csv/StringToValueMapTransform.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.source.csv; - -import com.google.common.base.Strings; -import feast.ingestion.model.Values; -import feast.types.ValueProto.Value; -import java.util.HashMap; -import java.util.Map; -import java.util.Map.Entry; -import org.apache.beam.sdk.coders.MapCoder; -import org.apache.beam.sdk.coders.StringUtf8Coder; -import org.apache.beam.sdk.extensions.protobuf.ProtoCoder; -import org.apache.beam.sdk.transforms.DoFn; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.values.PCollection; - -public class StringToValueMapTransform extends - PTransform>, PCollection>> { - - public static MapCoder VALUE_MAP_CODER = MapCoder.of(StringUtf8Coder.of(), - ProtoCoder.of(Value.class)); - - @Override - public PCollection> expand(PCollection> input) { - return input.apply(ParDo.of(new StringToValueMapDoFn())) - .setCoder(VALUE_MAP_CODER); - } - - public static class StringToValueMapDoFn extends - DoFn, Map> { - - @ProcessElement - public void processElement(ProcessContext context) { - Map row = new HashMap<>(); - for (Entry entry : context.element().entrySet()) { - String stringVal = entry.getValue(); - if (!Strings.isNullOrEmpty(stringVal)) { - row.put(entry.getKey(), Values.ofString(entry.getValue())); - } - } - context.output(row); - } - } -} \ No newline at end of file diff --git a/ingestion/src/main/java/feast/source/json/JsonFileFeatureSource.java b/ingestion/src/main/java/feast/source/json/JsonFileFeatureSource.java deleted file mode 100644 index 3420d1f5639..00000000000 --- a/ingestion/src/main/java/feast/source/json/JsonFileFeatureSource.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.source.json; - -import static com.google.common.base.Preconditions.checkArgument; - -import com.google.auto.service.AutoService; -import com.google.common.base.Preconditions; -import feast.options.Options; -import feast.options.OptionsParser; -import feast.source.FeatureSource; -import feast.source.FeatureSourceFactory; -import feast.source.common.ValueMapToFeatureRowTransform; -import feast.specs.ImportSpecProto.ImportSpec; -import feast.specs.ImportSpecProto.Schema; -import feast.types.FeatureRowProto.FeatureRow; -import javax.validation.constraints.NotEmpty; -import lombok.AllArgsConstructor; -import org.apache.beam.sdk.io.TextIO; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PInput; - - -/** - * Transform for processing JSON text jsonfiles and that contain JSON serialised FeatureRow - * messages, one per line. - * - *

This transform asserts that the import spec is for only one entity, as all columns must - * have the same entity. - * - *

The output is a PCollection of {@link feast.types.FeatureRowProto.FeatureRow FeatureRows}. - */ -@AllArgsConstructor -public class JsonFileFeatureSource extends FeatureSource { - - public static final String JSON_FILE_FEATURE_SOURCE_TYPE = "file.json"; - - private final ImportSpec importSpec; - - @Override - public PCollection expand(PInput input) { - JsonFileFeatureSourceOptions options = OptionsParser - .parse(importSpec.getSourceOptionsMap(), JsonFileFeatureSourceOptions.class); - PCollection jsonLines = input.getPipeline().apply(TextIO.read().from(options.path)); - - Preconditions - .checkArgument(importSpec.getEntitiesCount() == 1, "Import spec must have only 1 entity"); - String entity = importSpec.getEntities(0); - Schema schema = importSpec.getSchema(); - - return jsonLines.apply(new ParseJsonTransform()) - .apply(new ValueMapToFeatureRowTransform(entity, schema)); - } - - public static class JsonFileFeatureSourceOptions implements Options { - - @NotEmpty - public String path; - } - - @AutoService(FeatureSourceFactory.class) - public static class Factory implements FeatureSourceFactory { - - @Override - public String getType() { - return JSON_FILE_FEATURE_SOURCE_TYPE; - } - - @Override - public FeatureSource create(ImportSpec importSpec) { - checkArgument(importSpec.getType().equals(getType())); - - return new JsonFileFeatureSource(importSpec); - } - } - -} - diff --git a/ingestion/src/main/java/feast/source/json/ParseJsonTransform.java b/ingestion/src/main/java/feast/source/json/ParseJsonTransform.java deleted file mode 100644 index ee2b0035853..00000000000 --- a/ingestion/src/main/java/feast/source/json/ParseJsonTransform.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.source.json; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import com.google.gson.JsonDeserializationContext; -import com.google.gson.JsonDeserializer; -import com.google.gson.JsonElement; -import com.google.gson.JsonParseException; -import com.google.gson.JsonPrimitive; -import com.google.gson.reflect.TypeToken; -import feast.ingestion.model.Values; -import feast.types.ValueProto.Value; -import java.lang.reflect.Type; -import java.util.Map; -import org.apache.beam.sdk.transforms.DoFn; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.values.PCollection; - -public class ParseJsonTransform extends - PTransform, PCollection>> { - - @Override - public PCollection> expand(PCollection input) { - return input.apply(ParDo.of(new ParseJsonDoFn())); - } - - - public static class ParseJsonDoFn extends DoFn> { - - private transient Gson gson; - private transient Type valueMapType; - - @ProcessElement - public void processElement(ProcessContext context) { - context.output(parseJson(context.element())); - } - - Map parseJson(String json) { - if (gson == null) { - GsonBuilder gsonBuilder = new GsonBuilder(); - gsonBuilder.registerTypeAdapter(Value.class, new ValueDeserializer()); - gson = gsonBuilder.create(); - valueMapType = new TypeToken>() { - }.getType(); - } - return gson.fromJson(json, valueMapType); - } - } - - private static class ValueDeserializer implements JsonDeserializer { - - @Override - public Value deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) - throws JsonParseException { - if (json.isJsonNull()) { - return Value.newBuilder().build(); // return UNKNOWN value. - } else if (json.isJsonPrimitive()) { - JsonPrimitive primitive = json.getAsJsonPrimitive(); - if (primitive.isBoolean()) { - return Values.ofBool(primitive.getAsBoolean()); - } else if (primitive.isString()) { - return Values.ofString(primitive.getAsString()); - } else { - // Find out if it is an int type - Double doubleVal = primitive.getAsDouble(); - Long int64Val = primitive.getAsLong(); - - if (Double.compare(int64Val.doubleValue(), doubleVal) != 0.0) { - return Values.ofDouble(doubleVal); - } else { - return Values.ofInt64(int64Val); - } - } - } else if (json.isJsonArray() || json.isJsonObject()) { - return Values.ofString(json.toString()); - } else { - throw new JsonParseException("Unknown json element type "); - } - } - } -} \ No newline at end of file diff --git a/ingestion/src/main/java/feast/source/kafka/FeatureRowDeserializer.java b/ingestion/src/main/java/feast/source/kafka/FeatureRowDeserializer.java deleted file mode 100644 index d62b959adea..00000000000 --- a/ingestion/src/main/java/feast/source/kafka/FeatureRowDeserializer.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.source.kafka; - -import com.google.protobuf.InvalidProtocolBufferException; -import feast.types.FeatureRowProto.FeatureRow; -import java.util.Map; -import org.apache.kafka.common.errors.SerializationException; -import org.apache.kafka.common.serialization.Deserializer; - -/** - * Deserializer for Kafka to deserialize Protocol Buffers messages - * - * @param Protobuf message type - */ -public class FeatureRowDeserializer implements Deserializer { - - @Override - public void configure(Map configs, boolean isKey) {} - - @Override - public FeatureRow deserialize(String topic, byte[] data) { - try { - return FeatureRow.parseFrom(data); - } catch (InvalidProtocolBufferException e) { - throw new SerializationException("Error deserializing FeatureRow from Protobuf message", e); - } - } - - @Override - public void close() {} -} diff --git a/ingestion/src/main/java/feast/source/kafka/KafkaFeatureSource.java b/ingestion/src/main/java/feast/source/kafka/KafkaFeatureSource.java deleted file mode 100644 index 5c8ca713fda..00000000000 --- a/ingestion/src/main/java/feast/source/kafka/KafkaFeatureSource.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.source.kafka; - -import static com.google.common.base.Preconditions.checkArgument; - -import com.google.auto.service.AutoService; -import com.google.common.base.Strings; -import feast.ingestion.options.JobOptions; -import feast.ingestion.transform.fn.FilterFeatureRowDoFn; -import feast.options.Options; -import feast.options.OptionsParser; -import feast.source.FeatureSource; -import feast.source.FeatureSourceFactory; -import feast.specs.ImportSpecProto.Field; -import feast.specs.ImportSpecProto.ImportSpec; -import feast.types.FeatureRowProto.FeatureRow; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import javax.validation.constraints.NotEmpty; -import lombok.AllArgsConstructor; -import org.apache.beam.sdk.io.kafka.KafkaIO; -import org.apache.beam.sdk.io.kafka.KafkaRecord; -import org.apache.beam.sdk.transforms.DoFn; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PInput; -import org.apache.kafka.common.serialization.ByteArrayDeserializer; - -/** - * Transform for reading {@link feast.types.FeatureRowProto.FeatureRow FeatureRow} proto messages - * from kafka one or more kafka topics. 
- */ -@AllArgsConstructor -public class KafkaFeatureSource extends FeatureSource { - - public static final String KAFKA_FEATURE_SOURCE_TYPE = "kafka"; - private ImportSpec importSpec; - - @Override - public PCollection expand(PInput input) { - checkArgument(importSpec.getType().equals(KAFKA_FEATURE_SOURCE_TYPE)); - - KafkaReadOptions options = - OptionsParser.parse(importSpec.getSourceOptionsMap(), KafkaReadOptions.class); - JobOptions jobOptions = OptionsParser.parse(importSpec.getJobOptionsMap(), JobOptions.class); - - List topicsList = new ArrayList<>(Arrays.asList(options.topics.split(","))); - - KafkaIO.Read read = KafkaIO.read() - .withBootstrapServers(options.server) - .withTopics(topicsList) - .withKeyDeserializer(ByteArrayDeserializer.class) - .withValueDeserializer(FeatureRowDeserializer.class); - if (jobOptions.getSampleLimit() > 0) { - read = read.withMaxNumRecords(jobOptions.getSampleLimit()); - } - - PCollection> featureRowRecord = - input.getPipeline().apply(read); - - PCollection featureRow = featureRowRecord.apply( - ParDo.of( - new DoFn, FeatureRow>() { - @ProcessElement - public void processElement(ProcessContext processContext) { - KafkaRecord record = processContext.element(); - processContext.output(record.getKV().getValue()); - } - })); - - if (options.discardUnknownFeatures) { - List featureIds = new ArrayList<>(); - for (Field field : importSpec.getSchema().getFieldsList()) { - String featureId = field.getFeatureId(); - if (!Strings.isNullOrEmpty(featureId)) { - featureIds.add(featureId); - } - } - return featureRow.apply(ParDo.of(new FilterFeatureRowDoFn(featureIds))); - } - return featureRow; - } - - public static class KafkaReadOptions implements Options { - - @NotEmpty - public String server; - @NotEmpty - public String topics; - public boolean discardUnknownFeatures = false; - } - - @AutoService(FeatureSourceFactory.class) - public static class Factory implements FeatureSourceFactory { - - @Override - public String getType() { - return KAFKA_FEATURE_SOURCE_TYPE; - } - - @Override - public FeatureSource create(ImportSpec importSpec) { - checkArgument(importSpec.getType().equals(getType())); - return new KafkaFeatureSource(importSpec); - } - } -} diff --git a/ingestion/src/main/java/feast/source/pubsub/PubSubFeatureSource.java b/ingestion/src/main/java/feast/source/pubsub/PubSubFeatureSource.java deleted file mode 100644 index 36f00074aff..00000000000 --- a/ingestion/src/main/java/feast/source/pubsub/PubSubFeatureSource.java +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.source.pubsub; - -import static com.google.common.base.Preconditions.checkArgument; - -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.auto.service.AutoService; -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import com.google.common.collect.Lists; -import feast.ingestion.transform.fn.FilterFeatureRowDoFn; -import feast.options.Options; -import feast.options.OptionsParser; -import feast.source.FeatureSource; -import feast.source.FeatureSourceFactory; -import feast.source.csv.ParseCsvTransform; -import feast.source.json.ParseJsonTransform; -import feast.source.csv.StringToValueMapTransform; -import feast.source.common.ValueMapToFeatureRowTransform; -import feast.specs.ImportSpecProto.Field; -import feast.specs.ImportSpecProto.ImportSpec; -import feast.types.FeatureRowProto.FeatureRow; -import java.util.ArrayList; -import java.util.List; -import javax.validation.constraints.AssertTrue; -import lombok.Builder; -import lombok.NonNull; -import org.apache.beam.sdk.io.gcp.pubsub.PubsubIO; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PInput; - -/** - * Transform for reading {@link feast.types.FeatureRowProto.FeatureRow FeatureRow} proto messages - * from Cloud PubSub. - * - *

This transform accepts multiple entities in the import spec and expects no columns to be - * specified as it does not need to construct FeatureRows, merely pass them on. - * - *

Because Feast ingestion is stateless, the message event time is simply the processing time, - * there is no need to override it based on any property of the message. - */ -@Builder -public class PubSubFeatureSource extends FeatureSource { - - public static final String PUBSUB_FEATURE_SOURCE_TYPE = "pubsub"; - - @NonNull - private ImportSpec importSpec; - - @Override - public PCollection expand(PInput input) { - checkArgument(importSpec.getType().equals(PUBSUB_FEATURE_SOURCE_TYPE)); - PubSubReadOptions options = - OptionsParser.parse(importSpec.getSourceOptionsMap(), PubSubReadOptions.class); - - PCollection featureRows; - switch (options.messageFormat) { - case FEATURE_ROW: - PubsubIO.Read read = fromSubscriptionOrTopic( - PubsubIO.readProtos(FeatureRow.class), options); - featureRows = input.getPipeline().apply(read); - break; - case CSV: - Preconditions.checkArgument(importSpec.getEntitiesCount() == 1, - "pubsub source with format csv, import spec must have one entity"); - featureRows = input.getPipeline() - .apply(fromSubscriptionOrTopic(PubsubIO.readStrings(), options)) - .apply(ParseCsvTransform.builder().header(Lists.newArrayList()).build()) - .apply(new StringToValueMapTransform()) - .apply(new ValueMapToFeatureRowTransform(importSpec.getEntities(0), - importSpec.getSchema())); - break; - case JSON: - Preconditions.checkArgument(importSpec.getEntitiesCount() == 1, - "pubsub source with format json, import spec must have one entity"); - featureRows = input.getPipeline() - .apply(fromSubscriptionOrTopic(PubsubIO.readStrings(), options)) - .apply(new ParseJsonTransform()) - .apply(new ValueMapToFeatureRowTransform(importSpec.getEntities(0), - importSpec.getSchema())); - break; - default: - throw new IllegalArgumentException( - String.format("Unhandled message format %s", options.messageFormat)); - } - if (options.discardUnknownFeatures) { - List featureIds = new ArrayList<>(); - for (Field field : importSpec.getSchema().getFieldsList()) { - String featureId = field.getFeatureId(); - if (!Strings.isNullOrEmpty(featureId)) { - featureIds.add(featureId); - } - } - return featureRows.apply(ParDo.of(new FilterFeatureRowDoFn(featureIds))); - } - return featureRows; - } - - private PubsubIO.Read fromSubscriptionOrTopic(PubsubIO.Read read, - PubSubReadOptions options) { - if (!Strings.isNullOrEmpty(options.subscription)) { - read = read.fromSubscription(options.subscription); - } else if (!Strings.isNullOrEmpty(options.topic)) { - read = read.fromTopic(options.topic); - } - return read; - } - - public enum MessageFormat { - @JsonProperty(value = "featureRow") - FEATURE_ROW, - @JsonProperty(value = "json") - JSON, - @JsonProperty(value = "csv") - CSV - } - - public static class PubSubReadOptions implements Options { - - public String subscription; - public String topic; - public MessageFormat messageFormat = MessageFormat.FEATURE_ROW; - public boolean discardUnknownFeatures = false; - - @AssertTrue(message = "subscription or topic must be set") - boolean isValid() { - return !Strings.isNullOrEmpty(subscription) || !Strings.isNullOrEmpty(topic); - } - } - - @AutoService(FeatureSourceFactory.class) - public static class Factory implements FeatureSourceFactory { - - @Override - public String getType() { - return PUBSUB_FEATURE_SOURCE_TYPE; - } - - @Override - public FeatureSource create(ImportSpec importSpec) { - checkArgument(importSpec.getType().equals(getType())); - return new PubSubFeatureSource(importSpec); - } - } -} diff --git a/ingestion/src/main/java/feast/store/FeatureStoreFactory.java 
b/ingestion/src/main/java/feast/store/FeatureStoreFactory.java deleted file mode 100644 index 998df98b924..00000000000 --- a/ingestion/src/main/java/feast/store/FeatureStoreFactory.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store; - -import feast.ingestion.model.Specs; -import feast.specs.StorageSpecProto.StorageSpec; - -public interface FeatureStoreFactory { - FeatureStoreWrite create(StorageSpec storageSpec, Specs specs); - - String getType(); -} diff --git a/ingestion/src/main/java/feast/store/FeatureStoreWrite.java b/ingestion/src/main/java/feast/store/FeatureStoreWrite.java deleted file mode 100644 index 759ff3bfa28..00000000000 --- a/ingestion/src/main/java/feast/store/FeatureStoreWrite.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store; - -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PDone; - -public abstract class FeatureStoreWrite extends PTransform, PDone> {} diff --git a/ingestion/src/main/java/feast/store/FileStoreOptions.java b/ingestion/src/main/java/feast/store/FileStoreOptions.java deleted file mode 100644 index 5df669f0e98..00000000000 --- a/ingestion/src/main/java/feast/store/FileStoreOptions.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.store; - -import com.fasterxml.jackson.annotation.JsonIgnore; -import javax.validation.constraints.NotEmpty; -import org.joda.time.Duration; -import org.joda.time.format.ISOPeriodFormat; -import feast.options.Options; -import feast.options.Validation.ISO8601Duration; - -public class FileStoreOptions implements Options { - static final Duration DEFAULT_WINDOW_SIZE = Duration.standardMinutes(5); - - @NotEmpty public String path; - - @ISO8601Duration public String windowSize; - - @JsonIgnore public String jobName; - - @JsonIgnore - public Duration getWindowDuration() { - if (windowSize == null) { - return DEFAULT_WINDOW_SIZE; - } - return ISOPeriodFormat.standard().parsePeriod(windowSize).toStandardDuration(); - } -} diff --git a/ingestion/src/main/java/feast/store/NoOpIO.java b/ingestion/src/main/java/feast/store/NoOpIO.java deleted file mode 100644 index 65a68118c9b..00000000000 --- a/ingestion/src/main/java/feast/store/NoOpIO.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store; - -import feast.ingestion.transform.fn.Identity; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PDone; - -public class NoOpIO { - - public static class Write extends FeatureStoreWrite { - - @Override - public PDone expand(PCollection input) { - input.apply(getName(), ParDo.of(new Identity(getName()))); - return PDone.in(input.getPipeline()); - } - } -} diff --git a/ingestion/src/main/java/feast/store/TextFileDynamicIO.java b/ingestion/src/main/java/feast/store/TextFileDynamicIO.java deleted file mode 100644 index 250b46ea6b7..00000000000 --- a/ingestion/src/main/java/feast/store/TextFileDynamicIO.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.store; - -import lombok.AllArgsConstructor; -import org.apache.beam.sdk.coders.StringUtf8Coder; -import org.apache.beam.sdk.io.FileIO; -import org.apache.beam.sdk.io.TextIO; -import org.apache.beam.sdk.io.WriteFilesResult; -import org.apache.beam.sdk.transforms.Contextful; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.transforms.windowing.AfterWatermark; -import org.apache.beam.sdk.transforms.windowing.FixedWindows; -import org.apache.beam.sdk.transforms.windowing.Window; -import org.apache.beam.sdk.values.KV; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PCollection.IsBounded; -import org.apache.beam.sdk.values.PDone; -import org.joda.time.Duration; - -public class TextFileDynamicIO { - - private TextFileDynamicIO() { - } - - @AllArgsConstructor - public static class Write extends PTransform>, PDone> { - - private final FileStoreOptions options; - private final String suffix; - - /** - * Writes to different file sinks based on a - */ - @Override - public PDone expand(PCollection> input) { - final String folderName = options.jobName != null ? options.jobName : "unknown-jobs"; - FileIO.Write> write = - FileIO.>writeDynamic() - .by(KV::getKey) - .withDestinationCoder(StringUtf8Coder.of()) - .withNaming( - Contextful.fn( - (key) -> FileIO.Write.defaultNaming(folderName + "/" + key + "/part", suffix))) - .via(Contextful.fn(KV::getValue), Contextful.fn((entityName) -> TextIO.sink())) - .to(options.path); - - if (input.isBounded().equals(IsBounded.UNBOUNDED)) { - Window> minuteWindow = - Window.>into(FixedWindows.of(options.getWindowDuration())) - .triggering(AfterWatermark.pastEndOfWindow()) - .discardingFiredPanes() - .withAllowedLateness(Duration.ZERO); - input = input.apply(minuteWindow); - write = write.withNumShards(10); - } - WriteFilesResult outputFiles = input.apply(write); - return PDone.in(outputFiles.getPipeline()); - } - } -} diff --git a/ingestion/src/main/java/feast/store/errors/FeatureErrorsFactory.java b/ingestion/src/main/java/feast/store/errors/FeatureErrorsFactory.java deleted file mode 100644 index 6047e4c0887..00000000000 --- a/ingestion/src/main/java/feast/store/errors/FeatureErrorsFactory.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.store.errors; - -import feast.store.FeatureStoreFactory; - -public interface FeatureErrorsFactory extends FeatureStoreFactory {} diff --git a/ingestion/src/main/java/feast/store/errors/FeatureErrorsFactoryService.java b/ingestion/src/main/java/feast/store/errors/FeatureErrorsFactoryService.java deleted file mode 100644 index 4f5ad2c50a2..00000000000 --- a/ingestion/src/main/java/feast/store/errors/FeatureErrorsFactoryService.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.errors; - -import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; -import java.util.ArrayList; -import java.util.List; -import java.util.ServiceLoader; -import lombok.extern.slf4j.Slf4j; - -/** - * Service class for fetching all the FeatureErrorsFactory instances available - */ -@Slf4j -public class FeatureErrorsFactoryService { - - private static ServiceLoader serviceLoader = ServiceLoader - .load(FeatureErrorsFactory.class); - private static List manuallyRegistered = new ArrayList<>(); - - static { - for (FeatureErrorsFactory store : getAll()) { - log.info("FeatureErrorsFactory type found: " + store.getType()); - } - } - - public static List getAll() { - return Lists.newArrayList( - Iterators.concat(manuallyRegistered.iterator(), serviceLoader.iterator())); - } - - /** - * Get store of the given subclass. - */ - public static T get(Class clazz) { - for (FeatureErrorsFactory store : getAll()) { - if (clazz.isInstance(store)) { - //noinspection unchecked - return (T) store; - } - } - return null; - } - - public static void register(FeatureErrorsFactory store) { - manuallyRegistered.add(store); - } -} diff --git a/ingestion/src/main/java/feast/store/errors/json/JsonFileErrorsFactory.java b/ingestion/src/main/java/feast/store/errors/json/JsonFileErrorsFactory.java deleted file mode 100644 index 42df872ff7d..00000000000 --- a/ingestion/src/main/java/feast/store/errors/json/JsonFileErrorsFactory.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.store.errors.json; - -import com.google.auto.service.AutoService; -import feast.ingestion.model.Specs; -import feast.store.FeatureStoreWrite; -import feast.options.OptionsParser; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.store.FileStoreOptions; -import feast.store.errors.FeatureErrorsFactory; -import lombok.AllArgsConstructor; - -@AutoService(FeatureErrorsFactory.class) -@AllArgsConstructor - -public class JsonFileErrorsFactory implements FeatureErrorsFactory { - - public static final String JSON_FILES_TYPE = "file.json"; - - @Override - public FeatureStoreWrite create(StorageSpec storageSpec, Specs specs) { - FileStoreOptions options = - OptionsParser.parse(storageSpec.getOptionsMap(), FileStoreOptions.class); - options.jobName = specs.getJobName(); - return new JsonFileErrorsWrite(options); - } - - @Override - public String getType() { - return JSON_FILES_TYPE; - } -} diff --git a/ingestion/src/main/java/feast/store/errors/json/JsonFileErrorsWrite.java b/ingestion/src/main/java/feast/store/errors/json/JsonFileErrorsWrite.java deleted file mode 100644 index 2ae55fa3b46..00000000000 --- a/ingestion/src/main/java/feast/store/errors/json/JsonFileErrorsWrite.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.store.errors.json; - -import static org.apache.beam.sdk.values.TypeDescriptors.kvs; -import static org.apache.beam.sdk.values.TypeDescriptors.strings; - -import com.google.protobuf.InvalidProtocolBufferException; -import com.google.protobuf.util.JsonFormat; -import feast.store.FeatureStoreWrite; -import feast.store.FileStoreOptions; -import feast.store.TextFileDynamicIO; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import lombok.AllArgsConstructor; -import org.apache.beam.sdk.transforms.MapElements; -import org.apache.beam.sdk.values.KV; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PDone; - -@AllArgsConstructor -public class JsonFileErrorsWrite extends FeatureStoreWrite { - - private FileStoreOptions options; - - @Override - public PDone expand(PCollection input) { - return input - .apply( - "Map to strings", - MapElements.into(kvs(strings(), strings())) - .via( - (rowExtended) -> { - try { - return KV.of( - rowExtended.getRow().getEntityName(), - JsonFormat.printer() - .omittingInsignificantWhitespace() - .print(rowExtended)); - } catch (Exception e) { - return KV.of( - rowExtended.getRow().getEntityName(), - e.toString()); - } - })) - .apply("Write Error Json Files", new TextFileDynamicIO.Write(options, ".json")); - } -} diff --git a/ingestion/src/main/java/feast/store/errors/logging/LogIO.java b/ingestion/src/main/java/feast/store/errors/logging/LogIO.java deleted file mode 100644 index 45a6c6826a0..00000000000 --- a/ingestion/src/main/java/feast/store/errors/logging/LogIO.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.errors.logging; - -import feast.store.FeatureStoreWrite; -import feast.ingestion.transform.fn.LoggerDoFn; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import lombok.AllArgsConstructor; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PDone; -import org.slf4j.event.Level; - -public class LogIO { - - @AllArgsConstructor - public static class Write extends FeatureStoreWrite { - - private Level level; - - @Override - public PDone expand(PCollection input) { - input.apply("Log to " + level.toString(), ParDo.of(new LoggerDoFn(level))); - return PDone.in(input.getPipeline()); - } - } -} diff --git a/ingestion/src/main/java/feast/store/errors/logging/StderrFeatureErrorsFactory.java b/ingestion/src/main/java/feast/store/errors/logging/StderrFeatureErrorsFactory.java deleted file mode 100644 index 0e904928db0..00000000000 --- a/ingestion/src/main/java/feast/store/errors/logging/StderrFeatureErrorsFactory.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.errors.logging; - -import com.google.auto.service.AutoService; -import feast.ingestion.model.Specs; -import feast.store.FeatureStoreWrite; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.store.errors.FeatureErrorsFactory; -import org.slf4j.event.Level; - -@AutoService(FeatureErrorsFactory.class) -public class StderrFeatureErrorsFactory implements FeatureErrorsFactory { - - public static final String TYPE_STDERR = "stderr"; - - @Override - public FeatureStoreWrite create(StorageSpec storageSpec, Specs specs) { - return new LogIO.Write(Level.ERROR); - } - - @Override - public String getType() { - return TYPE_STDERR; - } -} diff --git a/ingestion/src/main/java/feast/store/errors/logging/StdoutFeatureErrorsFactory.java b/ingestion/src/main/java/feast/store/errors/logging/StdoutFeatureErrorsFactory.java deleted file mode 100644 index afc3a35d1b5..00000000000 --- a/ingestion/src/main/java/feast/store/errors/logging/StdoutFeatureErrorsFactory.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.errors.logging; - -import com.google.auto.service.AutoService; -import feast.ingestion.model.Specs; -import feast.store.FeatureStoreWrite; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.store.errors.FeatureErrorsFactory; -import org.slf4j.event.Level; - -@AutoService(FeatureErrorsFactory.class) -public class StdoutFeatureErrorsFactory implements FeatureErrorsFactory { - - public static final String TYPE_STDOUT = "stdout"; - - @Override - public FeatureStoreWrite create(StorageSpec storageSpec, Specs specs) { - return new LogIO.Write(Level.INFO); - } - - @Override - public String getType() { - return TYPE_STDOUT; - } -} diff --git a/ingestion/src/main/java/feast/store/serving/FeatureServingFactory.java b/ingestion/src/main/java/feast/store/serving/FeatureServingFactory.java deleted file mode 100644 index ca9491973e9..00000000000 --- a/ingestion/src/main/java/feast/store/serving/FeatureServingFactory.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.serving; - -import feast.store.FeatureStoreFactory; - -public interface FeatureServingFactory extends FeatureStoreFactory {} diff --git a/ingestion/src/main/java/feast/store/serving/FeatureServingFactoryService.java b/ingestion/src/main/java/feast/store/serving/FeatureServingFactoryService.java deleted file mode 100644 index d720c0dca1f..00000000000 --- a/ingestion/src/main/java/feast/store/serving/FeatureServingFactoryService.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.serving; - -import avro.shaded.com.google.common.collect.Lists; -import com.google.common.collect.Iterators; -import java.util.ArrayList; -import java.util.List; -import java.util.ServiceLoader; -import lombok.extern.slf4j.Slf4j; - -/** - * Service class for fetching all the FeatureServingFactory instances available - */ -@Slf4j -public class FeatureServingFactoryService { - - private static ServiceLoader serviceLoader = ServiceLoader - .load(FeatureServingFactory.class); - private static List manuallyRegistered = new ArrayList<>(); - - static { - for (FeatureServingFactory store : getAll()) { - log.info("FeatureServingFactory type found: " + store.getType()); - } - } - - public static List getAll() { - return Lists.newArrayList( - Iterators.concat(manuallyRegistered.iterator(), serviceLoader.iterator())); - } - - /** - * Get store of the given subclass. - */ - public static T get(Class clazz) { - for (FeatureServingFactory store : getAll()) { - if (clazz.isInstance(store)) { - //noinspection unchecked - return (T) store; - } - } - return null; - } - - public static void register(FeatureServingFactory store) { - manuallyRegistered.add(store); - } -} diff --git a/ingestion/src/main/java/feast/store/serving/FeatureServingStoreClient.java b/ingestion/src/main/java/feast/store/serving/FeatureServingStoreClient.java deleted file mode 100644 index 5aa7251b894..00000000000 --- a/ingestion/src/main/java/feast/store/serving/FeatureServingStoreClient.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.serving; - -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.types.FeatureRowProto.FeatureRow; -import java.util.Collection; -import java.util.List; - -/** - * Abstraction of Client for Feast Serving Store - */ -public interface FeatureServingStoreClient { - - /** - * Get a the latest FeatureRow containing features for a single entity id - */ - FeatureRow get(String entityName, String entityId, List featureSpecs); - - /** - * Get a the latest FeatureRow for containing features for each entity id - */ - List get( - String entityName, Collection entityIds, List featureSpecs); - -} \ No newline at end of file diff --git a/ingestion/src/main/java/feast/store/serving/bigquery/FeatureRowToTableRowDoFn.java b/ingestion/src/main/java/feast/store/serving/bigquery/FeatureRowToTableRowDoFn.java new file mode 100644 index 00000000000..a2ac738e01f --- /dev/null +++ b/ingestion/src/main/java/feast/store/serving/bigquery/FeatureRowToTableRowDoFn.java @@ -0,0 +1,98 @@ +package feast.store.serving.bigquery; + +import com.google.api.services.bigquery.model.TableRow; +import com.google.protobuf.util.Timestamps; +import feast.types.FeatureRowProto.FeatureRow; +import feast.types.FieldProto.Field; +import java.util.Base64; +import java.util.stream.Collectors; +import org.apache.beam.sdk.transforms.DoFn; +import org.joda.time.Instant; + +// TODO: Validate FeatureRow against FeatureSetSpec +// i.e. 
that the value types in FeatureRow match those in FeatureSetSpec + +public class FeatureRowToTableRowDoFn extends DoFn<FeatureRow, TableRow> { + private static final String EVENT_TIMESTAMP_COLUMN = "event_timestamp"; + private static final String CREATED_TIMESTAMP_COLUMN = "created_timestamp"; + private static final String JOB_ID_COLUMN = "job_id"; + private final String jobId; + + public FeatureRowToTableRowDoFn(String jobId) { + this.jobId = jobId; + } + + public static String getEventTimestampColumn() { + return EVENT_TIMESTAMP_COLUMN; + } + + @ProcessElement + public void processElement(@Element FeatureRow featureRow, OutputReceiver<TableRow> out) { + out.output(createTableRow(featureRow, jobId)); + } + + private static TableRow createTableRow(FeatureRow featureRow, String jobId) { + + TableRow tableRow = new TableRow(); + tableRow.set(EVENT_TIMESTAMP_COLUMN, Timestamps.toString(featureRow.getEventTimestamp())); + tableRow.set(CREATED_TIMESTAMP_COLUMN, Instant.now().toString()); + tableRow.set(JOB_ID_COLUMN, jobId); + + for (Field field : featureRow.getFieldsList()) { + switch (field.getValue().getValCase()) { + case BYTES_VAL: + tableRow.set( + field.getName(), + Base64.getEncoder().encodeToString(field.getValue().getBytesVal().toByteArray())); + break; + case STRING_VAL: + tableRow.set(field.getName(), field.getValue().getStringVal()); + break; + case INT32_VAL: + tableRow.set(field.getName(), field.getValue().getInt32Val()); + break; + case INT64_VAL: + tableRow.set(field.getName(), field.getValue().getInt64Val()); + break; + case DOUBLE_VAL: + tableRow.set(field.getName(), field.getValue().getDoubleVal()); + break; + case FLOAT_VAL: + tableRow.set(field.getName(), field.getValue().getFloatVal()); + break; + case BOOL_VAL: + tableRow.set(field.getName(), field.getValue().getBoolVal()); + break; + case BYTES_LIST_VAL: + tableRow.set( + field.getName(), + field.getValue().getBytesListVal().getValList().stream() + .map(x -> Base64.getEncoder().encodeToString(x.toByteArray())) + .collect(Collectors.toList())); + break; + case STRING_LIST_VAL: + tableRow.set(field.getName(), field.getValue().getStringListVal().getValList()); + break; + case INT32_LIST_VAL: + tableRow.set(field.getName(), field.getValue().getInt32ListVal().getValList()); + break; + case INT64_LIST_VAL: + tableRow.set(field.getName(), field.getValue().getInt64ListVal().getValList()); + break; + case DOUBLE_LIST_VAL: + tableRow.set(field.getName(), field.getValue().getDoubleListVal().getValList()); + break; + case FLOAT_LIST_VAL: + tableRow.set(field.getName(), field.getValue().getFloatListVal().getValList()); + break; + case BOOL_LIST_VAL: + tableRow.set(field.getName(), field.getValue().getBoolListVal().getValList()); + break; + case VAL_NOT_SET: + break; + } + } + + return tableRow; + } +} diff --git a/ingestion/src/main/java/feast/store/serving/bigtable/BigTableFeatureOptions.java b/ingestion/src/main/java/feast/store/serving/bigtable/BigTableFeatureOptions.java deleted file mode 100644 index d68f11cc896..00000000000 --- a/ingestion/src/main/java/feast/store/serving/bigtable/BigTableFeatureOptions.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.serving.bigtable; - -import com.fasterxml.jackson.annotation.JsonProperty; -import feast.options.Options; - -public class BigTableFeatureOptions implements Options { - - @JsonProperty(value = "bigtable.family") - public String family; -} diff --git a/ingestion/src/main/java/feast/store/serving/bigtable/BigTableServingStoreFactory.java b/ingestion/src/main/java/feast/store/serving/bigtable/BigTableServingStoreFactory.java deleted file mode 100644 index 9583a5d431b..00000000000 --- a/ingestion/src/main/java/feast/store/serving/bigtable/BigTableServingStoreFactory.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.serving.bigtable; - -import com.google.auto.service.AutoService; -import com.google.common.base.Preconditions; -import feast.ingestion.model.Specs; -import feast.store.FeatureStoreWrite; -import feast.options.OptionsParser; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.store.serving.FeatureServingFactory; - -@AutoService(FeatureServingFactory.class) -public class BigTableServingStoreFactory implements FeatureServingFactory { - - public static String TYPE_BIGTABLE = "bigtable"; - - @Override - public FeatureStoreWrite create(StorageSpec storageSpec, Specs specs) { - Preconditions.checkArgument(storageSpec.getType().equals(getType())); - - BigTableStoreOptions options = - OptionsParser.parse(storageSpec.getOptionsMap(), BigTableStoreOptions.class); - return new FeatureRowBigTableIO.Write(options, specs); - } - - @Override - public String getType() { - return TYPE_BIGTABLE; - } -} diff --git a/ingestion/src/main/java/feast/store/serving/bigtable/BigTableStoreOptions.java b/ingestion/src/main/java/feast/store/serving/bigtable/BigTableStoreOptions.java deleted file mode 100644 index 92a32de31f4..00000000000 --- a/ingestion/src/main/java/feast/store/serving/bigtable/BigTableStoreOptions.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.serving.bigtable; - -import feast.options.Options; -import java.io.Serializable; -import javax.validation.constraints.NotEmpty; - -public class BigTableStoreOptions implements Options, Serializable { - - public static final String DEFAULT_FAMILY = "default"; - @NotEmpty - public String project; - @NotEmpty - public String instance; - public String family = DEFAULT_FAMILY; - public String prefix; -} diff --git a/ingestion/src/main/java/feast/store/serving/bigtable/FeatureRowBigTableIO.java b/ingestion/src/main/java/feast/store/serving/bigtable/FeatureRowBigTableIO.java deleted file mode 100644 index ce94c0979a6..00000000000 --- a/ingestion/src/main/java/feast/store/serving/bigtable/FeatureRowBigTableIO.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.serving.bigtable; - -import com.google.cloud.bigtable.beam.CloudBigtableConfiguration; -import com.google.cloud.bigtable.beam.CloudBigtableIO; -import feast.ingestion.model.Specs; -import feast.store.FeatureStoreWrite; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import java.util.Collections; -import lombok.extern.slf4j.Slf4j; -import org.apache.beam.sdk.transforms.DoFn; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.values.KV; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PDone; -import org.apache.hadoop.hbase.client.Mutation; - -@Slf4j -public class FeatureRowBigTableIO { - - public static class Write extends FeatureStoreWrite { - - private BigTableStoreOptions bigTableOptions; - private Specs specs; - - public Write(BigTableStoreOptions bigTableOptions, Specs specs) { - this.bigTableOptions = bigTableOptions; - this.specs = specs; - } - - @Override - public PDone expand(PCollection input) { - log.info("Using BigTable options: " + bigTableOptions.toString()); - - // entity name to mutation key value - PCollection> mutations = - input.apply( - "Map to BigTable mutations", - ParDo.of(new FeatureRowToBigTableMutationDoFn(bigTableOptions.prefix, bigTableOptions.family, specs))); - - PCollection>> iterableMutations = - mutations.apply( - ParDo.of( - new DoFn, KV>>() { - @ProcessElement - public void processElement(ProcessContext context) { - KV kv = context.element(); - KV> mutationIters = - KV.of(kv.getKey(), Collections.singleton(kv.getValue())); - context.output(mutationIters); - } - })); - return iterableMutations.apply( - CloudBigtableIO.writeToMultipleTables( - new CloudBigtableConfiguration.Builder() - .withInstanceId(bigTableOptions.instance) - .withProjectId(bigTableOptions.project) - .build())); - } - } -} diff --git a/ingestion/src/main/java/feast/store/serving/bigtable/FeatureRowToBigTableMutationDoFn.java b/ingestion/src/main/java/feast/store/serving/bigtable/FeatureRowToBigTableMutationDoFn.java 
deleted file mode 100644 index 1e203ebe0eb..00000000000 --- a/ingestion/src/main/java/feast/store/serving/bigtable/FeatureRowToBigTableMutationDoFn.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.serving.bigtable; - -import com.google.common.base.Charsets; -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import feast.SerializableCache; -import feast.ingestion.model.Specs; -import feast.ingestion.util.DateUtil; -import feast.options.OptionsParser; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.storage.BigTableProto.BigTableRowKey; -import feast.types.FeatureProto.Feature; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import feast.types.FeatureRowProto.FeatureRow; -import lombok.extern.slf4j.Slf4j; -import org.apache.beam.sdk.transforms.DoFn; -import org.apache.beam.sdk.values.KV; -import org.apache.commons.codec.digest.DigestUtils; -import org.apache.hadoop.hbase.client.Mutation; -import org.apache.hadoop.hbase.client.Put; - -/** - * DoFn for taking a feature row and making Bigtable mutations out of it. Also keys the mutations by - * the entity name which should be used as the table name. - */ -@Slf4j -public class FeatureRowToBigTableMutationDoFn - extends DoFn> { - - private static final String LATEST_KEY = "0"; - - private final SerializableCache servingOptionsCache = - SerializableCache.builder() - .loadingFunction( - (featureSpec) -> - OptionsParser.lenientParse( - featureSpec.getOptionsMap(), - BigTableFeatureOptions.class)) - .build(); - private final String tablePrefix; - private final String family; - private final Specs specs; - - - FeatureRowToBigTableMutationDoFn(String tablePrefix, String family, Specs specs) { - this.tablePrefix = tablePrefix; - this.family = family; - this.specs = specs; - } - - public static BigTableRowKey makeBigTableRowKey(String entityKey) { - - return BigTableRowKey.newBuilder() - .setSha1Prefix(DigestUtils.sha1Hex(entityKey).substring(0, 7)) - .setEntityKey(entityKey) - .setReversedMillis(LATEST_KEY) - .build(); - } - - @ProcessElement - public void processElement(ProcessContext context) { - FeatureRowExtended rowExtended = context.element(); - FeatureRow row = rowExtended.getRow(); - Put put = makePut(rowExtended); - context.output(KV.of(getTableName(row), put)); - } - - private String getTableName(FeatureRow row) { - if (!Strings.isNullOrEmpty(tablePrefix)) { - return tablePrefix + row.getEntityName(); - } else { - return row.getEntityName(); - } - } - - /** - * Given an row and a feature info service, build a BigTable Put mutation - * - *

bigtable row key = {sha1(row.key), row.key, row.timestamp} family = {feature.group} - * qualifier = {feature.name} value = {feature.value} - */ - public Put makePut(FeatureRowExtended rowExtended) { - FeatureRow row = rowExtended.getRow(); - // We always additinally overwrite a None granularity row so that it is trivial to retrieve the - // latest across all features. - Put latestPut = new Put(makeBigTableRowKey(row.getEntityKey()).toByteArray()); - for (Feature feature : row.getFeaturesList()) { - FeatureSpec featureSpec = specs.getFeatureSpec(feature.getId()); - BigTableFeatureOptions options = servingOptionsCache.get(featureSpec); - - Preconditions.checkArgument(!Strings.isNullOrEmpty(this.family) - || !Strings.isNullOrEmpty(options.family)); - byte[] family = (!Strings.isNullOrEmpty(options.family) ? options.family : this.family) - .getBytes(Charsets.UTF_8); - byte[] qualifier = feature.getId().getBytes(Charsets.UTF_8); - byte[] value = feature.getValue().toByteArray(); - long version = DateUtil.toMillis(row.getEventTimestamp()); - latestPut.addColumn(family, qualifier, version, value); - } - return latestPut; - } -} diff --git a/ingestion/src/main/java/feast/store/serving/redis/FeatureRowRedisIO.java b/ingestion/src/main/java/feast/store/serving/redis/FeatureRowRedisIO.java deleted file mode 100644 index 9c5772140b6..00000000000 --- a/ingestion/src/main/java/feast/store/serving/redis/FeatureRowRedisIO.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.store.serving.redis; - -import feast.ingestion.model.Specs; -import feast.store.FeatureStoreWrite; -import feast.store.serving.redis.RedisCustomIO.RedisMutation; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PDone; - -public class FeatureRowRedisIO { - - public static class Write extends FeatureStoreWrite { - - private final RedisStoreOptions options; - private final Specs specs; - - public Write(RedisStoreOptions options, Specs specs) { - this.options = options; - this.specs = specs; - } - - @Override - public PDone expand(PCollection input) { - PCollection mutation = - input.apply("Map to Redis mutations", ParDo.of(new FeatureRowToRedisMutationDoFn(specs))); - return mutation.apply( - RedisCustomIO.write(options.host, options.port) - .withBatchSize(options.batchSize) - .withTimeout(options.timeout)); - } - } -} diff --git a/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java b/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java index 1d2ebfa5cb6..5c51b5d9340 100644 --- a/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java +++ b/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java @@ -17,52 +17,41 @@ package feast.store.serving.redis; -import feast.SerializableCache; -import feast.ingestion.model.Specs; -import feast.ingestion.util.DateUtil; -import feast.options.OptionsParser; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.storage.RedisProto.RedisBucketKey; -import feast.storage.RedisProto.RedisBucketValue; +import feast.core.FeatureSetProto.EntitySpec; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.storage.RedisProto.RedisKey; +import feast.storage.RedisProto.RedisKey.Builder; import feast.store.serving.redis.RedisCustomIO.Method; import feast.store.serving.redis.RedisCustomIO.RedisMutation; -import feast.types.FeatureProto.Feature; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; import feast.types.FeatureRowProto.FeatureRow; -import java.util.Random; +import feast.types.FieldProto.Field; +import java.util.Set; +import java.util.stream.Collectors; import org.apache.beam.sdk.transforms.DoFn; -import org.apache.commons.codec.digest.DigestUtils; -import org.joda.time.Duration; +import org.slf4j.Logger; -public class FeatureRowToRedisMutationDoFn extends DoFn { +public class FeatureRowToRedisMutationDoFn extends DoFn { - private final SerializableCache servingOptionsCache = - SerializableCache.builder() - .loadingFunction( - (featureSpec) -> - OptionsParser.lenientParse( - featureSpec.getOptionsMap(), - RedisFeatureOptions.class)) - .build(); - private Specs specs; - private Random random; + private static final Logger log = org.slf4j.LoggerFactory + .getLogger(FeatureRowToRedisMutationDoFn.class); + private FeatureSetSpec featureSetSpec; - public FeatureRowToRedisMutationDoFn(Specs specs) { - this.specs = specs; - this.random = new Random(); + public FeatureRowToRedisMutationDoFn(FeatureSetSpec featureSetSpec) { + this.featureSetSpec = featureSetSpec; } - static RedisBucketKey getRedisBucketKey( - String entityId, String featureIdSha1Prefix, long bucketId) { - return RedisBucketKey.newBuilder() - .setEntityKey(entityId) - .setFeatureIdSha1Prefix(featureIdSha1Prefix) - .setBucketId(bucketId) - .build(); - } + private RedisKey 
getKey(FeatureRow featureRow) { + Set entityNames = featureSetSpec.getEntitiesList().stream() + .map(EntitySpec::getName).collect(Collectors.toSet()); - static String getFeatureIdSha1Prefix(String featureId) { - return DigestUtils.sha1Hex(featureId.getBytes()).substring(0, 7); + Builder redisKeyBuilder = RedisKey.newBuilder() + .setFeatureSet(featureRow.getFeatureSet()); + for (Field field : featureRow.getFieldsList()) { + if (entityNames.contains(field.getName())) { + redisKeyBuilder.addEntities(field); + } + } + return redisKeyBuilder.build(); } /** @@ -70,35 +59,14 @@ static String getFeatureIdSha1Prefix(String featureId) { */ @ProcessElement public void processElement(ProcessContext context) { - FeatureRowExtended rowExtended = context.element(); - FeatureRow row = rowExtended.getRow(); - - String entityKey = row.getEntityKey(); - - for (Feature feature : row.getFeaturesList()) { - String featureId = feature.getId(); - FeatureSpec featureSpec = specs.getFeatureSpec(featureId); - String featureIdHash = getFeatureIdSha1Prefix(featureId); - - RedisFeatureOptions options = servingOptionsCache.get(featureSpec); - - RedisBucketValue value = - RedisBucketValue.newBuilder() - .setValue(feature.getValue()) - .setEventTimestamp(row.getEventTimestamp()) - .build(); - RedisBucketKey keyForLatest = getRedisBucketKey(entityKey, featureIdHash, 0L); - - Duration expiry = options.getExpiryDuration(); - // add randomness to expiry so that it won't expire in the same time. - long expiryMillis = (long) (expiry.getMillis() * (1 + random.nextFloat())); - context.output( - RedisMutation.builder() - .key(keyForLatest.toByteArray()) - .value(value.toByteArray()) - .expiryMillis(expiryMillis) - .method(Method.SET) - .build()); + FeatureRow featureRow = context.element(); + RedisKey key = getKey(featureRow); + try { + RedisMutation redisMutation = new RedisMutation(Method.SET, key.toByteArray(), + featureRow.toByteArray(), null, null); + context.output(redisMutation); + } catch (Exception e) { + log.error(e.getMessage(), e); } } } diff --git a/ingestion/src/main/java/feast/store/serving/redis/RedisCustomIO.java b/ingestion/src/main/java/feast/store/serving/redis/RedisCustomIO.java index 4dc53467c70..d5b6c695d60 100644 --- a/ingestion/src/main/java/feast/store/serving/redis/RedisCustomIO.java +++ b/ingestion/src/main/java/feast/store/serving/redis/RedisCustomIO.java @@ -18,8 +18,7 @@ // package io.suryawirawan.henry.beam.redis.io; package feast.store.serving.redis; -import lombok.Builder; -import lombok.Data; +import org.apache.avro.reflect.Nullable; import org.apache.beam.sdk.coders.AvroCoder; import org.apache.beam.sdk.coders.DefaultCoder; import org.apache.beam.sdk.transforms.DoFn; @@ -34,12 +33,14 @@ import redis.clients.jedis.Response; public class RedisCustomIO { + private static final int DEFAULT_BATCH_SIZE = 1000; private static final int DEFAULT_TIMEOUT = 2000; - private static final Logger LOGGER = LoggerFactory.getLogger(RedisCustomIO.class); + private static final Logger log = LoggerFactory.getLogger(RedisCustomIO.class); - private RedisCustomIO() {} + private RedisCustomIO() { + } public static Write write(String host, int port) { return new Write(host, port); @@ -53,7 +54,9 @@ public enum Method { */ APPEND, - /** Use SET command. If key already holds a value, it is overwritten. */ + /** + * Use SET command. If key already holds a value, it is overwritten. 
+ */ SET, /** @@ -86,18 +89,74 @@ public enum Method { ZADD } - @Builder - @Data - @DefaultCoder(value = AvroCoder.class) + @DefaultCoder(AvroCoder.class) public static class RedisMutation { - private final Method method; - private final byte[] key; - private final byte[] value; - private final long expiryMillis; - private final long score; // Score is only utilized when method is ZSET + + private Method method; + private byte[] key; + private byte[] value; + @Nullable + private Long expiryMillis; + @Nullable + private Long score; + + public RedisMutation() {} + + public RedisMutation(Method method, byte[] key, byte[] value, @Nullable Long expiryMillis, + @Nullable Long score) { + this.method = method; + this.key = key; + this.value = value; + this.expiryMillis = expiryMillis; + this.score = score; + } + + public Method getMethod() { + return method; + } + + public void setMethod(Method method) { + this.method = method; + } + + public byte[] getKey() { + return key; + } + + public void setKey(byte[] key) { + this.key = key; + } + + public byte[] getValue() { + return value; + } + + public void setValue(byte[] value) { + this.value = value; + } + + @Nullable + public Long getExpiryMillis() { + return expiryMillis; + } + + public void setExpiryMillis(@Nullable Long expiryMillis) { + this.expiryMillis = expiryMillis; + } + + @Nullable + public Long getScore() { + return score; + } + + public void setScore(@Nullable Long score) { + this.score = score; + } } - /** ServingStoreWrite data to a Redis server. */ + /** + * ServingStoreWrite data to a Redis server. + */ public static class Write extends PTransform, PDone> { private WriteDoFn dofn; @@ -138,12 +197,16 @@ public static class WriteDoFn extends DoFn { } public WriteDoFn withBatchSize(int batchSize) { - if (batchSize > 0) this.batchSize = batchSize; + if (batchSize > 0) { + this.batchSize = batchSize; + } return this; } public WriteDoFn withTimeout(int timeout) { - if (timeout > 0) this.timeout = timeout; + if (timeout > 0) { + this.timeout = timeout; + } return this; } @@ -163,7 +226,7 @@ public void startBundle() { public void processElement(ProcessContext context) { RedisMutation mutation = context.element(); writeRecord(mutation); - if (mutation.getExpiryMillis() > 0) { + if (mutation.getExpiryMillis() != null && mutation.getExpiryMillis() > 0) { pipeline.pexpire(mutation.getKey(), mutation.getExpiryMillis()); } batchCount++; diff --git a/ingestion/src/main/java/feast/store/serving/redis/RedisFeatureOptions.java b/ingestion/src/main/java/feast/store/serving/redis/RedisFeatureOptions.java deleted file mode 100644 index d68cbb2989c..00000000000 --- a/ingestion/src/main/java/feast/store/serving/redis/RedisFeatureOptions.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.store.serving.redis; - -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import feast.options.Options; -import feast.options.Validation.ISO8601Duration; -import org.joda.time.Duration; -import org.joda.time.format.ISOPeriodFormat; - -public class RedisFeatureOptions implements Options { - - static final String DEFAULT_EXPIRY = "PT0H"; - - @ISO8601Duration - @JsonProperty(value = "redis.expiry") - public String expiry = DEFAULT_EXPIRY; // ISO8601 Period - - @JsonIgnore - Duration getExpiryDuration() { - return ISOPeriodFormat.standard().parsePeriod(expiry).toStandardDuration(); - } -} diff --git a/ingestion/src/main/java/feast/store/serving/redis/RedisServingFactory.java b/ingestion/src/main/java/feast/store/serving/redis/RedisServingFactory.java deleted file mode 100644 index ca023c616bd..00000000000 --- a/ingestion/src/main/java/feast/store/serving/redis/RedisServingFactory.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.serving.redis; - -import com.google.auto.service.AutoService; -import com.google.common.base.Preconditions; -import feast.ingestion.model.Specs; -import feast.store.FeatureStoreWrite; -import feast.options.OptionsParser; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.store.serving.FeatureServingFactory; - -@AutoService(FeatureServingFactory.class) -public class RedisServingFactory implements FeatureServingFactory { - - public static final String TYPE_REDIS = "redis"; - - @Override - public FeatureStoreWrite create(StorageSpec storageSpec, Specs specs) { - Preconditions.checkArgument( - storageSpec.getType().equals(TYPE_REDIS), "Storage spec type was not " + TYPE_REDIS); - RedisStoreOptions options = - OptionsParser.parse(storageSpec.getOptionsMap(), RedisStoreOptions.class); - return new FeatureRowRedisIO.Write(options, specs); - } - - public String getType() { - return TYPE_REDIS; - } -} diff --git a/ingestion/src/main/java/feast/store/serving/redis/RedisStoreOptions.java b/ingestion/src/main/java/feast/store/serving/redis/RedisStoreOptions.java deleted file mode 100644 index 9b257d40f00..00000000000 --- a/ingestion/src/main/java/feast/store/serving/redis/RedisStoreOptions.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.store.serving.redis; - -import feast.options.Options; -import javax.validation.constraints.NotEmpty; -import javax.validation.constraints.Positive; -import javax.validation.constraints.PositiveOrZero; -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.NoArgsConstructor; - -@Builder -@NoArgsConstructor -@AllArgsConstructor -public class RedisStoreOptions implements Options { - - @NotEmpty public String host; - - @Positive public int port; - - @PositiveOrZero public int batchSize; - - /** Timeout in milliseconds * */ - @PositiveOrZero public int timeout; -} diff --git a/ingestion/src/main/java/feast/store/warehouse/FeatureWarehouseFactory.java b/ingestion/src/main/java/feast/store/warehouse/FeatureWarehouseFactory.java deleted file mode 100644 index 72b5617962e..00000000000 --- a/ingestion/src/main/java/feast/store/warehouse/FeatureWarehouseFactory.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.warehouse; - -import feast.store.FeatureStoreFactory; - -public interface FeatureWarehouseFactory extends FeatureStoreFactory { - -} diff --git a/ingestion/src/main/java/feast/store/warehouse/FeatureWarehouseFactoryService.java b/ingestion/src/main/java/feast/store/warehouse/FeatureWarehouseFactoryService.java deleted file mode 100644 index 06ae7236134..00000000000 --- a/ingestion/src/main/java/feast/store/warehouse/FeatureWarehouseFactoryService.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.store.warehouse; - -import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; -import java.util.ArrayList; -import java.util.List; -import java.util.ServiceLoader; -import lombok.extern.slf4j.Slf4j; - -/** - * Service class for fetching all the FeatureWarehouseFactory instances available - */ -@Slf4j -public class FeatureWarehouseFactoryService { - private static ServiceLoader serviceLoader = - ServiceLoader.load(FeatureWarehouseFactory.class); - private static List manuallyRegistered = new ArrayList<>(); - - static { - for (FeatureWarehouseFactory store : getAll()) { - log.info("FeatureWarehouseFactory type found: " + store.getType()); - } -} - - public static List getAll() { - return Lists.newArrayList( - Iterators.concat(manuallyRegistered.iterator(), serviceLoader.iterator())); - } - - /** Get store of the given subclass. */ - public static T get(Class clazz) { - for (FeatureWarehouseFactory store : getAll()) { - if (clazz.isInstance(store)) { - //noinspection unchecked - return (T) store; - } - } - return null; - } - - public static void register(FeatureWarehouseFactory store) { - manuallyRegistered.add(store); - } - } diff --git a/ingestion/src/main/java/feast/store/warehouse/bigquery/BigQueryStoreOptions.java b/ingestion/src/main/java/feast/store/warehouse/bigquery/BigQueryStoreOptions.java deleted file mode 100644 index f82efb58e8b..00000000000 --- a/ingestion/src/main/java/feast/store/warehouse/bigquery/BigQueryStoreOptions.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.warehouse.bigquery; - -import javax.validation.constraints.NotEmpty; -import feast.options.Options; - -public class BigQueryStoreOptions implements Options { - @NotEmpty public String project; - @NotEmpty public String dataset; - public String tempLocation; // gcs or local. -} diff --git a/ingestion/src/main/java/feast/store/warehouse/bigquery/BigQueryWarehouseFactory.java b/ingestion/src/main/java/feast/store/warehouse/bigquery/BigQueryWarehouseFactory.java deleted file mode 100644 index 33ad75a32b3..00000000000 --- a/ingestion/src/main/java/feast/store/warehouse/bigquery/BigQueryWarehouseFactory.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.store.warehouse.bigquery; - -import com.google.auto.service.AutoService; -import com.google.common.base.Preconditions; -import feast.ingestion.model.Specs; -import feast.store.FeatureStoreWrite; -import feast.options.OptionsParser; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.store.warehouse.FeatureWarehouseFactory; - -@AutoService(FeatureWarehouseFactory.class) -public class BigQueryWarehouseFactory implements FeatureWarehouseFactory { - public static String TYPE_BIGQUERY = "bigquery"; - - @Override - public FeatureStoreWrite create(StorageSpec storageSpec, Specs specs) { - Preconditions.checkArgument( - storageSpec.getType().equals(TYPE_BIGQUERY), "Storage spec type was not " + TYPE_BIGQUERY); - - BigQueryStoreOptions options = - OptionsParser.parse(storageSpec.getOptionsMap(), BigQueryStoreOptions.class); - - return new FeatureRowBigQueryIO.Write(options, specs); - } - - @Override - public String getType() { - return TYPE_BIGQUERY; - } -} diff --git a/ingestion/src/main/java/feast/store/warehouse/bigquery/FeatureRowBigQueryIO.java b/ingestion/src/main/java/feast/store/warehouse/bigquery/FeatureRowBigQueryIO.java deleted file mode 100644 index 06180f364ae..00000000000 --- a/ingestion/src/main/java/feast/store/warehouse/bigquery/FeatureRowBigQueryIO.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.store.warehouse.bigquery; - -import com.google.api.services.bigquery.model.TableSchema; -import com.google.common.base.Strings; -import com.google.inject.Inject; -import feast.ingestion.model.Specs; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.store.FeatureStoreWrite; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import feast.types.FeatureRowProto.FeatureRow; -import lombok.extern.slf4j.Slf4j; -import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO; -import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition; -import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.Method; -import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition; -import org.apache.beam.sdk.io.gcp.bigquery.DynamicDestinations; -import org.apache.beam.sdk.io.gcp.bigquery.TableDestination; -import org.apache.beam.sdk.io.gcp.bigquery.WriteResult; -import org.apache.beam.sdk.options.ValueProvider.StaticValueProvider; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PCollection.IsBounded; -import org.apache.beam.sdk.values.PDone; -import org.apache.beam.sdk.values.ValueInSingleWindow; -import org.joda.time.Duration; - -@Slf4j -public class FeatureRowBigQueryIO { - - public static class Write extends FeatureStoreWrite { - - private final BigQueryStoreOptions bigQueryOptions; - private final Specs specs; - - private Duration triggerFrequency = Duration.standardMinutes(5); - - @Inject - public Write(BigQueryStoreOptions bigQueryOptions, Specs specs) { - this.bigQueryOptions = bigQueryOptions; - this.specs = specs; - } - - @Override - public PDone expand(PCollection input) { - FeatureRowToBigQueryTableRowDoFn toTableRowDoFn = new FeatureRowToBigQueryTableRowDoFn(specs); - BigQueryIO.Write write = - BigQueryIO.write() - .to( - new DynamicDestinations() { - public String getDestination(ValueInSingleWindow element) { - FeatureRowExtended featureRowExtended = element.getValue(); - FeatureRow row = featureRowExtended.getRow(); - EntitySpec entityInfo = specs.getEntitySpec(row.getEntityName()); - - String tableName = entityInfo.getName(); - return bigQueryOptions.project - + ":" - + bigQueryOptions.dataset - + "." - + tableName; - } - - public TableDestination getTable(String tableSpec) { - return new TableDestination(tableSpec, "Table " + tableSpec); - } - - @Override - public TableSchema getSchema(String destination) { - return null; - } - }) - .withFormatFunction(toTableRowDoFn::toTableRow) - .withCreateDisposition(CreateDisposition.CREATE_NEVER) - .withWriteDisposition(WriteDisposition.WRITE_APPEND) - .withMethod(Method.FILE_LOADS); - if (!Strings.isNullOrEmpty(bigQueryOptions.tempLocation)) { - log.info( - "Setting customer GCS temp location for BigQuery write to " - + bigQueryOptions.tempLocation); - write = - write.withCustomGcsTempLocation(StaticValueProvider.of(bigQueryOptions.tempLocation)); - } - - if (input.isBounded() == IsBounded.UNBOUNDED) { - write = - write - .withTriggeringFrequency(triggerFrequency) - // this is apparently supposed to be the default according to beam code - // comments. 
- .withNumFileShards(100); - } - WriteResult result = input.apply(write); - return PDone.in(input.getPipeline()); - } - } -} diff --git a/ingestion/src/main/java/feast/store/warehouse/bigquery/FeatureRowToBigQueryTableRowDoFn.java b/ingestion/src/main/java/feast/store/warehouse/bigquery/FeatureRowToBigQueryTableRowDoFn.java deleted file mode 100644 index 828a8cc899f..00000000000 --- a/ingestion/src/main/java/feast/store/warehouse/bigquery/FeatureRowToBigQueryTableRowDoFn.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.warehouse.bigquery; - -import com.google.api.services.bigquery.model.TableRow; -import feast.ingestion.util.DateUtil; -import feast.ingestion.model.Specs; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.types.FeatureProto.Feature; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.ValueProto.Value; -import lombok.AllArgsConstructor; -import org.apache.beam.sdk.transforms.DoFn; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; - -@AllArgsConstructor -public class FeatureRowToBigQueryTableRowDoFn extends DoFn { - - private static final String ENTITY_KEY_COLUMN = "id"; - private static final String EVENT_TIMESTAMP_COLUMN = "event_timestamp"; - private static final String CREATED_TIMESTAMP_COLUMN = "created_timestamp"; - private static final String JOB_ID_COLUMN = "job_id"; - - - private Specs specs; - - @ProcessElement - public void processElement(ProcessContext context) { - context.output(toTableRow(context.element())); - } - - public TableRow toTableRow(FeatureRowExtended featureRowExtended) { - FeatureRow featureRow = featureRowExtended.getRow(); - TableRow tableRow = new TableRow(); - - String entityKey = featureRow.getEntityKey(); - tableRow.set(ENTITY_KEY_COLUMN, entityKey); - tableRow.set( - EVENT_TIMESTAMP_COLUMN, - ValueBigQueryBuilder.bigQueryObjectOf( - Value.newBuilder().setTimestampVal(featureRow.getEventTimestamp()))); - tableRow.set( - CREATED_TIMESTAMP_COLUMN, - ValueBigQueryBuilder.bigQueryObjectOf( - Value.newBuilder() - .setTimestampVal(DateUtil.toTimestamp(DateTime.now(DateTimeZone.UTC))))); - tableRow.set(JOB_ID_COLUMN, specs.getJobName()); - - for (Feature feature : featureRow.getFeaturesList()) { - Object featureValue = ValueBigQueryBuilder.bigQueryObjectOf(feature.getValue()); - FeatureSpec featureSpec = specs.getFeatureSpec(feature.getId()); - tableRow.set(featureSpec.getName(), featureValue); - } - return tableRow; - } -} diff --git a/ingestion/src/main/java/feast/store/warehouse/bigquery/ValueBigQueryBuilder.java b/ingestion/src/main/java/feast/store/warehouse/bigquery/ValueBigQueryBuilder.java deleted file mode 100644 index 34700dc96b7..00000000000 --- a/ingestion/src/main/java/feast/store/warehouse/bigquery/ValueBigQueryBuilder.java +++ /dev/null @@ -1,185 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed 
under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.warehouse.bigquery; - -import com.google.cloud.bigquery.StandardSQLTypeName; -import com.google.protobuf.ByteString; -import com.google.protobuf.Timestamp; -import java.nio.ByteBuffer; -import lombok.AllArgsConstructor; -import lombok.Getter; -import org.joda.time.DateTimeZone; -import org.joda.time.format.ISODateTimeFormat; -import feast.ingestion.exceptions.TypeConversionException; -import feast.ingestion.util.DateUtil; -import feast.types.ValueProto.Value; -import feast.types.ValueProto.ValueOrBuilder; -import feast.types.ValueProto.ValueType; - -/** Builder class for taking objects received from BigQuery with their type and mapping to Values */ -public class ValueBigQueryBuilder { - - public static Value valueOf(Object object, StandardSQLTypeName standardSQLTypeName) { - return new ToValueBuilder(object, standardSQLTypeName).build(); - } - - public static ValueType.Enum feastValueTypeOf(StandardSQLTypeName standardSQLTypeName) { - return new ToValueBuilder(null, standardSQLTypeName).getValueType(); - } - - public static Object bigQueryObjectOf(ValueOrBuilder feastValue) { - return new ToBigQueryObject(feastValue).build(); - } - - public static class ToValueBuilder { - - @Getter private final StandardSQLTypeName standardSQLTypeName; - @Getter private final Object bigQueryObject; - - public ToValueBuilder(Object obj, StandardSQLTypeName standardSQLTypeName) { - this.bigQueryObject = obj; - this.standardSQLTypeName = standardSQLTypeName; - } - - public ValueType.Enum getValueType() { - switch (standardSQLTypeName) { - case BOOL: - return ValueType.Enum.BOOL; - case BYTES: // BYTES = java.nio.HeapByteBuffer - return ValueType.Enum.BYTES; - case FLOAT64: - return ValueType.Enum.DOUBLE; - case INT64: - return ValueType.Enum.INT64; - case STRUCT: - case STRING: // STRING = org.apache.avro.util.Utf8 - return ValueType.Enum.STRING; - case DATE: // DATE = org.apache.avro.util.Utf8 (yyyy-MM-dd) - case DATETIME: // DATETIME = org.apache.avro.util.Utf8 (yyyy-MM-ddTHH:mm:ss) - case TIMESTAMP: // TIMESTAMP = java.lang.Long (in microseconds) - return ValueType.Enum.TIMESTAMP; - case TIME: // TIME = org.apache.avro.util.Utf8 (HH:mm:ss) - case NUMERIC: // NUMERIC = java.nio.HeapByteBuffer - default: - throw new UnsupportedOperationException( - String.format( - "BigQuery type conversion not implemented for %s", standardSQLTypeName)); - } - } - - private Object getFeastObject() { - switch (standardSQLTypeName) { - case BOOL: - return bigQueryObject; - case BYTES: // BYTES = java.nio.HeapByteBuffer - return ((ByteBuffer) bigQueryObject).array(); - case FLOAT64: - return bigQueryObject; - case INT64: - return bigQueryObject; - case STRUCT: - case STRING: // STRING = org.apache.avro.util.Utf8 - return bigQueryObject.toString(); - case DATE: // DATE = org.apache.avro.util.Utf8 (yyyy-MM-dd) - // This is equivalent to a Timestamp with zeroed time. 
- return DateUtil.toTimestamp( - ISODateTimeFormat.dateParser() - .parseDateTime(bigQueryObject.toString()) - .withZone(DateTimeZone.UTC)); - case DATETIME: // DATETIME = org.apache.avro.util.Utf8 (yyyy-MM-ddTHH:mm:ss) - return DateUtil.toTimestamp( - ISODateTimeFormat.dateTimeParser() - .parseDateTime(bigQueryObject.toString()) - .withZone(DateTimeZone.UTC)); - case TIMESTAMP: // TIMESTAMP = java.lang.Long (in microseconds) - return Timestamp.newBuilder().setSeconds((long) bigQueryObject / 1000000).build(); - case TIME: // TIME = org.apache.avro.util.Utf8 (HH:mm:ss) - case NUMERIC: // NUMERIC = java.nio.HeapByteBuffer - default: - throw new UnsupportedOperationException( - String.format( - "BigQuery type conversion not implemented for %s", standardSQLTypeName)); - } - } - - public Value build() { - Object feastObject = getFeastObject(); - if (feastObject == null) { - return null; - } - Value.Builder builder = Value.newBuilder(); - try { - switch (getValueType()) { - case TIMESTAMP: - return builder.setTimestampVal((Timestamp) feastObject).build(); - case STRING: - return builder.setStringVal((String) feastObject).build(); - case INT64: - return builder.setInt64Val((long) feastObject).build(); - case FLOAT: - return builder.setFloatVal((float) feastObject).build(); - case BYTES: - return builder.setBytesVal(ByteString.copyFrom((byte[]) feastObject)).build(); - case BOOL: - return builder.setBoolVal((boolean) feastObject).build(); - case INT32: - return builder.setInt32Val((int) feastObject).build(); - case DOUBLE: - return builder.setDoubleVal((double) feastObject).build(); - } - } catch (ClassCastException e) { - throw new TypeConversionException("Could not cast bigquery type", e); - } - return builder.build(); - } - } - - @AllArgsConstructor - public static class ToBigQueryObject { - - private final ValueOrBuilder feastValue; - - public Object build() { - switch (feastValue.getValCase()) { - case BOOLVAL: - return feastValue.getBoolVal(); - case FLOATVAL: - return (double) feastValue.getFloatVal(); // all floats are 64 bit in BQ. - case INT32VAL: - return (long) feastValue.getInt32Val(); // all integers are 64 bit in BQ - case INT64VAL: - return feastValue.getInt64Val(); - case DOUBLEVAL: - return feastValue.getDoubleVal(); - case STRINGVAL: - return feastValue.getStringVal(); - case TIMESTAMPVAL: - return DateUtil.toDateTime(feastValue.getTimestampVal()) - .toString(ISODateTimeFormat.dateTime()); - case BYTESVAL: - return feastValue.getBytesVal().toByteArray(); - case VAL_NOT_SET: - default: - throw new UnsupportedOperationException( - String.format( - "Not implemented converting Value to BigQuery value for type %s", - feastValue.getValCase())); - } - } - } -} diff --git a/ingestion/src/main/java/feast/store/warehouse/json/JsonFileWarehouseFactory.java b/ingestion/src/main/java/feast/store/warehouse/json/JsonFileWarehouseFactory.java deleted file mode 100644 index 5572772fcc2..00000000000 --- a/ingestion/src/main/java/feast/store/warehouse/json/JsonFileWarehouseFactory.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.warehouse.json; - -import com.google.auto.service.AutoService; -import feast.ingestion.model.Specs; -import feast.store.FeatureStoreWrite; -import feast.options.OptionsParser; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.store.FileStoreOptions; -import feast.store.warehouse.FeatureWarehouseFactory; -import lombok.AllArgsConstructor; - -@AutoService(FeatureWarehouseFactory.class) -@AllArgsConstructor -public class JsonFileWarehouseFactory implements FeatureWarehouseFactory { - - private static final String JSON_FILES_TYPE = "file.json"; - - @Override - public FeatureStoreWrite create(StorageSpec storageSpec, Specs specs) { - FileStoreOptions options = - OptionsParser.parse(storageSpec.getOptionsMap(), FileStoreOptions.class); - options.jobName = specs.getJobName(); - return new JsonFileWarehouseWrite(options); - } - - @Override - public String getType() { - return JSON_FILES_TYPE; - } -} diff --git a/ingestion/src/main/java/feast/store/warehouse/json/JsonFileWarehouseWrite.java b/ingestion/src/main/java/feast/store/warehouse/json/JsonFileWarehouseWrite.java deleted file mode 100644 index fd3050a049f..00000000000 --- a/ingestion/src/main/java/feast/store/warehouse/json/JsonFileWarehouseWrite.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.store.warehouse.json; - -import static org.apache.beam.sdk.values.TypeDescriptors.kvs; -import static org.apache.beam.sdk.values.TypeDescriptors.strings; - -import com.google.protobuf.InvalidProtocolBufferException; -import com.google.protobuf.util.JsonFormat; -import feast.store.FeatureStoreWrite; -import feast.store.FileStoreOptions; -import feast.store.TextFileDynamicIO; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import lombok.AllArgsConstructor; -import org.apache.beam.sdk.transforms.MapElements; -import org.apache.beam.sdk.values.KV; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PDone; - -@AllArgsConstructor -public class JsonFileWarehouseWrite extends FeatureStoreWrite { - - private FileStoreOptions options; - - @Override - public PDone expand(PCollection input) { - return input.apply("Map to strings", MapElements.into(kvs(strings(), strings())).via( - (rowExtended) -> { - try { - return KV.of( - rowExtended.getRow().getEntityName(), - JsonFormat.printer().omittingInsignificantWhitespace() - .print(rowExtended.getRow()) - ); - } catch (InvalidProtocolBufferException e) { - throw new RuntimeException(e); - } - } - )).apply("Write Warehouse Json Files", new TextFileDynamicIO.Write(options, ".json")); - } -} diff --git a/ingestion/src/main/proto/feast_ingestion/types/CoalesceAccum.proto b/ingestion/src/main/proto/feast_ingestion/types/CoalesceAccum.proto index 3e7ce90cbc0..cb64dd715f6 100644 --- a/ingestion/src/main/proto/feast_ingestion/types/CoalesceAccum.proto +++ b/ingestion/src/main/proto/feast_ingestion/types/CoalesceAccum.proto @@ -17,7 +17,7 @@ syntax = "proto3"; import "google/protobuf/timestamp.proto"; -import "feast/types/Feature.proto"; +import "feast/types/Field.proto"; option java_package = "feast_ingestion.types"; option java_outer_classname = "CoalesceAccumProto"; @@ -28,7 +28,7 @@ message CoalesceAccum { google.protobuf.Timestamp eventTimestamp = 3; string entityName = 4; - map features = 6; + map features = 6; // map of features to their counter values when they were last added to accumulator map featureMarks = 7; int64 counter = 8; diff --git a/ingestion/src/main/resources/logback.xml b/ingestion/src/main/resources/logback.xml index e9fc5ebbd5a..85197f167d4 100644 --- a/ingestion/src/main/resources/logback.xml +++ b/ingestion/src/main/resources/logback.xml @@ -18,10 +18,11 @@ - %d{HH:mm:ss} [%thread] %-5level %logger{50} - %msg%n + %d{HH:mm:ss} [%thread] %-5level %logger{100} - %msg%n + \ No newline at end of file diff --git a/ingestion/src/main/resources/schemas/deadletter_table_schema.json b/ingestion/src/main/resources/schemas/deadletter_table_schema.json new file mode 100644 index 00000000000..92381189073 --- /dev/null +++ b/ingestion/src/main/resources/schemas/deadletter_table_schema.json @@ -0,0 +1,34 @@ +{ + "fields": [ + { + "name": "timestamp", + "type": "TIMESTAMP", + "mode": "REQUIRED" + }, + { + "name": "job_name", + "type": "STRING", + "mode": "NULLABLE" + }, + { + "name": "transform_name", + "type": "STRING", + "mode": "NULLABLE" + }, + { + "name": "payload", + "type": "STRING", + "mode": "NULLABLE" + }, + { + "name": "error_message", + "type": "STRING", + "mode": "NULLABLE" + }, + { + "name": "stack_trace", + "type": "STRING", + "mode": "NULLABLE" + } + ] +} \ No newline at end of file diff --git a/ingestion/src/test/java/feast/NormalizeFeatureRows.java b/ingestion/src/test/java/feast/NormalizeFeatureRows.java deleted file mode 100644 index beb3b1b485b..00000000000 --- 
a/ingestion/src/test/java/feast/NormalizeFeatureRows.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast; - -import com.google.common.collect.Lists; -import com.google.common.primitives.UnsignedBytes; -import feast.types.FeatureProto.Feature; -import feast.types.FeatureRowProto.FeatureRow; -import java.util.List; -import org.apache.beam.sdk.transforms.MapElements; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.TypeDescriptor; - -public class NormalizeFeatureRows - extends PTransform, PCollection> { - - public static FeatureRow normalize(FeatureRow.Builder row) { - return normalize(row.build()); - } - - public static FeatureRow normalize(FeatureRow row) { - List features = Lists.newArrayList(row.getFeaturesList()); - features.sort( - (f1, f2) -> - UnsignedBytes.lexicographicalComparator().compare(f1.toByteArray(), f2.toByteArray())); - - return row.toBuilder() - .clearFeatures().addAllFeatures(features) - .setEventTimestamp(row.getEventTimestamp()) - .build(); - } - - @Override - public PCollection expand(PCollection input) { - return input - .apply( - "normalize rows", - MapElements.into(TypeDescriptor.of(FeatureRow.class)).via( - NormalizeFeatureRows::normalize)); - } -} diff --git a/ingestion/src/test/java/feast/SerializableCacheTest.java b/ingestion/src/test/java/feast/SerializableCacheTest.java deleted file mode 100644 index 6ad3d4d4ea9..00000000000 --- a/ingestion/src/test/java/feast/SerializableCacheTest.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast; - -import java.time.Duration; -import java.util.concurrent.atomic.AtomicInteger; -import org.junit.Assert; -import org.junit.Test; - -public class SerializableCacheTest { - @Test - public void testGet() { - - final AtomicInteger count = new AtomicInteger(0); - SerializableCache cache = - SerializableCache.builder() - .loadingFunction((str) -> str + " " + count.incrementAndGet()) - .build(); - - Assert.assertEquals("hello 1", cache.get("hello")); - Assert.assertEquals("hello 1", cache.get("hello")); - } - - @Test - public void testGetMaxSize() { - - final AtomicInteger count = new AtomicInteger(0); - SerializableCache cache = - SerializableCache.builder() - .loadingFunction((str) -> str + " " + count.incrementAndGet()) - .maximumSize(2) - .build(); - - Assert.assertEquals("x 1", cache.get("x")); - Assert.assertEquals("y 2", cache.get("y")); - - // still in cache - Assert.assertEquals("x 1", cache.get("x")); - Assert.assertEquals("y 2", cache.get("y")); - - // z will evict x from cache - Assert.assertEquals("z 3", cache.get("z")); - - // x not in cache and evict y - Assert.assertEquals("x 4", cache.get("x")); - Assert.assertEquals("y 5", cache.get("y")); - } - - @Test - public void testGetWithExpiry() throws InterruptedException { - final AtomicInteger count = new AtomicInteger(0); - SerializableCache cache = - SerializableCache.builder() - .loadingFunction((str) -> str + " " + count.incrementAndGet()) - .expireAfterAccess(Duration.ofMillis(100)) - .build(); - - Assert.assertEquals("x 1", cache.get("x")); - Assert.assertEquals("x 1", cache.get("x")); - - Thread.sleep(200); - Assert.assertEquals("x 2", cache.get("x")); - } -} diff --git a/ingestion/src/test/java/feast/TestHelper.java b/ingestion/src/test/java/feast/TestHelper.java deleted file mode 100644 index 34984266f6f..00000000000 --- a/ingestion/src/test/java/feast/TestHelper.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast; - -import com.fasterxml.jackson.core.JsonFactory; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; -import com.google.gson.Gson; -import java.io.IOException; -import java.io.OutputStream; - -public class TestHelper { - public static void writeYaml(Object model, OutputStream outputStream) throws IOException { - // convoluted way to make an import spec, converted it to json with gson, then to object with - // jackson, then to yaml. 
- String json = new Gson().toJson(model); - Object object = new ObjectMapper(new JsonFactory()).reader().readTree(json); - new ObjectMapper(new YAMLFactory()).writer().writeValue(outputStream, object); - } -} diff --git a/ingestion/src/test/java/feast/ToOrderedFeatureRows.java b/ingestion/src/test/java/feast/ToOrderedFeatureRows.java index cffbd5ff38c..f18ee8bb9a6 100644 --- a/ingestion/src/test/java/feast/ToOrderedFeatureRows.java +++ b/ingestion/src/test/java/feast/ToOrderedFeatureRows.java @@ -19,24 +19,24 @@ import com.google.common.collect.Lists; import com.google.common.primitives.UnsignedBytes; +import feast.types.FieldProto.Field; +import feast.types.FeatureRowExtendedProto.FeatureRowExtended; +import feast.types.FeatureRowProto.FeatureRow; import java.util.List; import org.apache.beam.sdk.transforms.MapElements; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TypeDescriptor; -import feast.types.FeatureProto.Feature; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import feast.types.FeatureRowProto.FeatureRow; public class ToOrderedFeatureRows extends PTransform, PCollection> { public static FeatureRow orderedFeatureRow(FeatureRow row) { - List features = Lists.newArrayList(row.getFeaturesList()); + List features = Lists.newArrayList(row.getFieldsList()); features.sort( (f1, f2) -> UnsignedBytes.lexicographicalComparator().compare(f1.toByteArray(), f2.toByteArray())); - return row.toBuilder().clearFeatures().addAllFeatures(features).build(); + return row.toBuilder().clearFields().addAllFields(features).build(); } @Override diff --git a/ingestion/src/test/java/feast/ingestion/ImportJobCSVTest.java b/ingestion/src/test/java/feast/ingestion/ImportJobCSVTest.java deleted file mode 100644 index 183499b0bd3..00000000000 --- a/ingestion/src/test/java/feast/ingestion/ImportJobCSVTest.java +++ /dev/null @@ -1,642 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion; - -import static feast.FeastMatchers.hasCount; -import static feast.NormalizeFeatureRows.normalize; -import static org.junit.Assert.assertEquals; - -import com.google.common.base.Charsets; -import com.google.common.collect.Lists; -import com.google.common.io.Files; -import com.google.common.io.Resources; -import com.google.inject.Guice; -import com.google.inject.Injector; -import com.google.protobuf.util.Timestamps; -import feast.ToOrderedFeatureRows; -import feast.ingestion.boot.ImportJobModule; -import feast.ingestion.boot.TestPipelineModule; -import feast.ingestion.config.ImportJobSpecsSupplier; -import feast.ingestion.model.Features; -import feast.ingestion.model.Values; -import feast.ingestion.options.ImportJobPipelineOptions; -import feast.ingestion.util.ProtoUtil; -import feast.specs.ImportJobSpecsProto.ImportJobSpecs; -import feast.specs.ImportSpecProto.ImportSpec; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.store.MockFeatureErrorsFactory; -import feast.store.MockServingFactory; -import feast.store.MockWarehouseFactory; -import feast.store.errors.FeatureErrorsFactoryService; -import feast.store.serving.FeatureServingFactoryService; -import feast.store.warehouse.FeatureWarehouseFactoryService; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import feast.types.FeatureRowProto.FeatureRow; -import java.io.File; -import java.io.IOException; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.text.ParseException; -import java.util.List; -import lombok.extern.slf4j.Slf4j; -import org.apache.beam.sdk.options.PipelineOptionsFactory; -import org.apache.beam.sdk.testing.PAssert; -import org.apache.beam.sdk.testing.TestPipeline; -import org.apache.beam.sdk.transforms.Flatten; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PCollectionList; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; - -@Slf4j -public class ImportJobCSVTest { - - @Rule - public TemporaryFolder folder = new TemporaryFolder(); - - @Rule - public TestPipeline testPipeline = TestPipeline.create(); - - public ImportJobSpecs getImportJobSpecs(ImportSpec importSpec, String dataFile) { - Path workspacePath = Paths.get(Resources.getResource("specs").getPath()); - ImportJobSpecs importJobSpecs = new ImportJobSpecsSupplier(workspacePath.toUri().toString()).get(); - return importJobSpecs.toBuilder().setImportSpec( - importSpec.toBuilder().putSourceOptions("path", dataFile) - ).build(); - } - - public ImportJobPipelineOptions initOptions() { - ImportJobPipelineOptions options = PipelineOptionsFactory.create() - .as(ImportJobPipelineOptions.class); - return options; - } - - @Test - public void testImportCSV() throws IOException, ParseException { - ImportSpec importSpec = - ProtoUtil.decodeProtoYaml( - "---\n" - + "type: file.csv\n" - + "sourceOptions:\n" - + " # path: to be overwritten in tests\n" - + "entities:\n" - + " - testEntity\n" - + "schema:\n" - + " entityIdColumn: id\n" - + " timestampValue: 2018-09-25T00:00:00.000Z\n" - + " fields:\n" - + " - name: id\n" - + " - featureId: testEntity.testInt32\n" - + " - featureId: testEntity.testString\n" - + "\n", - ImportSpec.getDefaultInstance()); - - File csvFile = folder.newFile("data.csv"); - Files.asCharSink(csvFile, Charsets.UTF_8).write("1,101,a\n2,202,b\n3,303,c\n"); - - ImportJobPipelineOptions options = initOptions(); - - Injector injector = - Guice.createInjector( - new ImportJobModule(options, 
getImportJobSpecs(importSpec, csvFile.toString())), - new TestPipelineModule(testPipeline)); - - ImportJob job = injector.getInstance(ImportJob.class); - injector.getInstance(ImportJob.class); - job.expand(); - - PCollection writtenToServing = - PCollectionList - .of(FeatureServingFactoryService.get(MockServingFactory.class).getWrite() - .getInputs()) - .apply("flatten serving input", Flatten.pCollections()); - - PCollection writtenToWarehouse = - PCollectionList.of( - FeatureWarehouseFactoryService.get(MockWarehouseFactory.class).getWrite() - .getInputs()) - .apply("flatten warehouse input", Flatten.pCollections()); - - PCollection writtenToErrors = - PCollectionList - .of(FeatureErrorsFactoryService.get(MockFeatureErrorsFactory.class).getWrite() - .getInputs()) - .apply("flatten errors input", Flatten.pCollections()); - - List expectedRows = - Lists.newArrayList( - normalize( - FeatureRow.newBuilder() - .setEventTimestamp(Timestamps.parse("2018-09-25T00:00:00.000Z")) - .setEntityKey("1") - .setEntityName("testEntity") - .addFeatures(Features.of("testEntity.testInt32", Values.ofInt32(101))) - .addFeatures(Features.of("testEntity.testString", Values.ofString("a"))) - .build()), - normalize( - FeatureRow.newBuilder() - .setEventTimestamp(Timestamps.parse("2018-09-25T00:00:00.000Z")) - .setEntityKey("2") - .setEntityName("testEntity") - .addFeatures(Features.of("testEntity.testInt32", Values.ofInt32(202))) - .addFeatures(Features.of("testEntity.testString", Values.ofString("b"))) - .build()), - normalize( - FeatureRow.newBuilder() - .setEventTimestamp(Timestamps.parse("2018-09-25T00:00:00.000Z")) - .setEntityKey("3") - .setEntityName("testEntity") - .addFeatures(Features.of("testEntity.testInt32", Values.ofInt32(303))) - .addFeatures(Features.of("testEntity.testString", Values.ofString("c"))) - .build())); - - PAssert.that(writtenToErrors).satisfies(hasCount(0)); - PAssert.that(writtenToServing).satisfies(hasCount(3)); - PAssert.that(writtenToWarehouse).satisfies(hasCount(3)); - - PAssert.that(writtenToServing.apply("serving toFeatureRows", new ToOrderedFeatureRows())) - .containsInAnyOrder(expectedRows); - - PAssert.that(writtenToWarehouse.apply("warehouse toFeatureRows", new ToOrderedFeatureRows())) - .containsInAnyOrder(expectedRows); - - testPipeline.run(); - } - - @Test - public void testImportFileJson() throws IOException, ParseException { - ImportSpec importSpec = - ProtoUtil.decodeProtoYaml( - "---\n" - + "type: file.json\n" - + "sourceOptions:\n" - + " # path: to be overwritten in tests\n" - + "entities:\n" - + " - testEntity\n" - + "schema:\n" - + " entityIdColumn: id\n" - + " timestampValue: 2018-09-25T00:00:00.000Z\n" - + " fields:\n" - + " - name: id\n" - + " - name: x\n" - + " featureId: testEntity.testInt32\n" - + "\n", - ImportSpec.getDefaultInstance()); - - File jsonFile = folder.newFile("data.json"); - Files.asCharSink(jsonFile, Charsets.UTF_8) - .write("{\"id\":1,\"x\":101}\n{\"id\":2,\"x\":202}\n"); - - ImportJobPipelineOptions options = initOptions(); - - Injector injector = - Guice.createInjector( - new ImportJobModule(options, getImportJobSpecs(importSpec, jsonFile.toString())), - new TestPipelineModule(testPipeline)); - - ImportJob job = injector.getInstance(ImportJob.class); - injector.getInstance(ImportJob.class); - job.expand(); - - PCollection writtenToServing = - PCollectionList - .of(FeatureServingFactoryService.get(MockServingFactory.class).getWrite().getInputs()) - .apply("flatten serving input", Flatten.pCollections()); - - PCollection writtenToWarehouse = - 
PCollectionList.of( - FeatureWarehouseFactoryService.get(MockWarehouseFactory.class).getWrite().getInputs()) - .apply("flatten warehouse input", Flatten.pCollections()); - - PCollection writtenToErrors = - PCollectionList - .of(FeatureErrorsFactoryService.get(MockFeatureErrorsFactory.class).getWrite() - .getInputs()) - .apply("flatten errors input", Flatten.pCollections()); - - List expectedRows = - Lists.newArrayList( - normalize( - FeatureRow.newBuilder() - .setEventTimestamp(Timestamps.parse("2018-09-25T00:00:00.000Z")) - .setEntityKey("1") - .setEntityName("testEntity") - .addFeatures(Features.of("testEntity.testInt32", Values.ofInt32(101))) - .build()), - normalize( - FeatureRow.newBuilder() - .setEventTimestamp(Timestamps.parse("2018-09-25T00:00:00.000Z")) - .setEntityKey("2") - .setEntityName("testEntity") - .addFeatures(Features.of("testEntity.testInt32", Values.ofInt32(202))) - .build())); - - PAssert.that(writtenToErrors).satisfies(hasCount(0)); - - PAssert.that(writtenToServing.apply("serving toFeatureRows", new ToOrderedFeatureRows())) - .containsInAnyOrder(expectedRows); - - PAssert.that(writtenToWarehouse.apply("warehouse toFeatureRows", new ToOrderedFeatureRows())) - .containsInAnyOrder(expectedRows); - - testPipeline.run(); - } - - @Test - public void testImportCSV_withSample1() throws IOException { - ImportSpec importSpec = - ProtoUtil.decodeProtoYaml( - "---\n" - + "type: file.csv\n" - + "sourceOptions:\n" - + " # path: to be overwritten in tests\n" - + "jobOptions:\n" - + " sample.limit: 1\n" - + "entities:\n" - + " - testEntity\n" - + "schema:\n" - + " entityIdColumn: id\n" - + " timestampValue: 2018-09-25T00:00:00.000Z\n" - + " fields:\n" - + " - name: id\n" - + " - featureId: testEntity.testInt32\n" - + " - featureId: testEntity.testString\n" - + "\n", - ImportSpec.getDefaultInstance()); - - File csvFile = folder.newFile("data.csv"); - Files.asCharSink(csvFile, Charsets.UTF_8).write("1,101,a\n2,202,b\n3,303,c\n"); - - ImportJobPipelineOptions options = initOptions(); - - Injector injector = - Guice.createInjector( - new ImportJobModule(options, getImportJobSpecs(importSpec, csvFile.toString())), - new TestPipelineModule(testPipeline)); - - ImportJob job = injector.getInstance(ImportJob.class); - injector.getInstance(ImportJob.class); - job.expand(); - - PCollection writtenToServing = - PCollectionList - .of(FeatureServingFactoryService.get(MockServingFactory.class).getWrite() - .getInputs()) - .apply("flatten serving input", Flatten.pCollections()); - - PCollection writtenToWarehouse = - PCollectionList.of( - FeatureWarehouseFactoryService.get(MockWarehouseFactory.class).getWrite() - .getInputs()) - .apply("flatten warehouse input", Flatten.pCollections()); - - PCollection writtenToErrors = - PCollectionList - .of(FeatureErrorsFactoryService.get(MockFeatureErrorsFactory.class).getWrite() - .getInputs()) - .apply("flatten errors input", Flatten.pCollections()); - - PAssert.that(writtenToServing).satisfies(hasCount(1)); - PAssert.that(writtenToWarehouse).satisfies(hasCount(1)); - PAssert.that(writtenToErrors).satisfies(hasCount(0)); - - testPipeline.run(); - } - - @Test - public void testImportCSV_withCoalesceRows() throws IOException, ParseException { - ImportSpec importSpec = - ProtoUtil.decodeProtoYaml( - "---\n" - + "type: file.csv\n" - + "sourceOptions:\n" - + " # path: to be overwritten in tests\n" - + "jobOptions:\n" - + " coalesceRows.enabled: true\n" - + "entities:\n" - + " - testEntity\n" - + "schema:\n" - + " entityIdColumn: id\n" - + " timestampColumn: 
timestamp\n" - + " fields:\n" - + " - name: id\n" - + " - name: timestamp\n" - + " - featureId: testEntity.testInt32\n" - + " - featureId: testEntity.testString\n" - + "\n", - ImportSpec.getDefaultInstance()); - - File csvFile = folder.newFile("data.csv"); - Files.asCharSink(csvFile, Charsets.UTF_8) - .write("1,2018-09-25T00:00:00.000Z,101,a\n1,2018-09-26T00:00:00.000Z,,b\n"); - - ImportJobPipelineOptions options = initOptions(); - - Injector injector = - Guice.createInjector( - new ImportJobModule(options, getImportJobSpecs(importSpec, csvFile.toString())), - new TestPipelineModule(testPipeline)); - - ImportJob job = injector.getInstance(ImportJob.class); - injector.getInstance(ImportJob.class); - job.expand(); - - PCollection writtenToServing = - PCollectionList - .of(FeatureServingFactoryService.get(MockServingFactory.class).getWrite() - .getInputs()) - .apply("flatten serving input", Flatten.pCollections()); - - PCollection writtenToWarehouse = - PCollectionList.of( - FeatureWarehouseFactoryService.get(MockWarehouseFactory.class).getWrite() - .getInputs()) - .apply("flatten warehouse input", Flatten.pCollections()); - - PCollection writtenToErrors = - PCollectionList - .of(FeatureErrorsFactoryService.get(MockFeatureErrorsFactory.class).getWrite() - .getInputs()) - .apply("flatten errors input", Flatten.pCollections()); - - PAssert.that(writtenToErrors).satisfies(hasCount(0)); - - PAssert.that(writtenToServing.apply("serving toFeatureRows", new ToOrderedFeatureRows())) - .containsInAnyOrder( - normalize( - FeatureRow.newBuilder() - .setEntityKey("1") - .setEntityName("testEntity") - .addFeatures(Features.of("testEntity.testInt32", Values.ofInt32(101))) - .addFeatures(Features.of("testEntity.testString", Values.ofString("b"))) - .setEventTimestamp(Timestamps.parse("2018-09-26T00:00:00.000Z")) - .build())); - - PAssert.that(writtenToWarehouse.apply("warehouse toFeatureRows", new ToOrderedFeatureRows())) - .containsInAnyOrder( - normalize( - FeatureRow.newBuilder() - .setEntityKey("1") - .setEntityName("testEntity") - .addFeatures(Features.of("testEntity.testInt32", Values.ofInt32(101))) - .addFeatures(Features.of("testEntity.testString", Values.ofString("a"))) - .setEventTimestamp(Timestamps.parse("2018-09-25T00:00:00.000Z")) - .build()), - normalize( - FeatureRow.newBuilder() - .setEntityKey("1") - .setEntityName("testEntity") - .addFeatures(Features.of("testEntity.testString", Values.ofString("b"))) - .setEventTimestamp(Timestamps.parse("2018-09-26T00:00:00.000Z")) - .build())); - - testPipeline.run(); - } - - /* - * ingestion no longer cares what the feature themselves say about which store they should write - * it instead always writes to the specs.getServingStoreSpec() and specs.getWarehouseStoreSpec(). 
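The coalesceRows assertions above amount to a simple rule: rows sharing an entity key are merged for the serving store, later feature values override earlier ones, features missing from the later row keep the last value seen, and the newest event timestamp wins, while the warehouse store still receives every raw row. Below is a minimal sketch of that merge rule for two rows of the same entity already ordered by event time; it illustrates the asserted behaviour only, not the actual CoalesceFeatureRows transform (which accumulates state via CoalesceAccum).

import com.google.protobuf.util.Timestamps;
import feast.types.FeatureProto.Feature;
import feast.types.FeatureRowProto.FeatureRow;
import java.util.LinkedHashMap;
import java.util.Map;

// Illustrative merge of an older and a newer FeatureRow for the same entity key,
// mirroring what testImportCSV_withCoalesceRows expects in the serving store.
static FeatureRow coalesce(FeatureRow older, FeatureRow newer) {
  Map<String, Feature> byId = new LinkedHashMap<>();
  older.getFeaturesList().forEach(f -> byId.put(f.getId(), f));
  newer.getFeaturesList().forEach(f -> byId.put(f.getId(), f)); // newer values win
  FeatureRow.Builder merged = newer.toBuilder().clearFeatures().addAllFeatures(byId.values());
  // keep whichever event timestamp is most recent
  if (Timestamps.toMillis(older.getEventTimestamp())
      > Timestamps.toMillis(newer.getEventTimestamp())) {
    merged.setEventTimestamp(older.getEventTimestamp());
  }
  return merged.build();
}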
- */ - public void testImportCSVUnknownServingStoreError() throws IOException { - ImportSpec importSpec = - ProtoUtil.decodeProtoYaml( - "---\n" - + "type: file.csv\n" - + "sourceOptions:\n" - + " # path: to be overwritten in tests\n" - + "entities:\n" - + " - testEntity\n" - + "schema:\n" - + " entityIdColumn: id\n" - + " timestampValue: 2018-09-25T00:00:00.000Z\n" - + " fields:\n" - + " - name: id\n" - + " - featureId: testEntity.unknownInt32\n" - // Unknown store is not available - + " - featureId: testEntity.testString\n" - + "\n", - ImportSpec.getDefaultInstance()); - - File csvFile = folder.newFile("data.csv"); - Files.asCharSink(csvFile, Charsets.UTF_8).write("1,101,a\n2,202,b\n3,303,c\n"); - - ImportJobPipelineOptions options = initOptions(); - - Injector injector = - Guice.createInjector( - new ImportJobModule(options, getImportJobSpecs(importSpec, csvFile.toString())), - new TestPipelineModule(testPipeline)); - - ImportJob job = injector.getInstance(ImportJob.class); - injector.getInstance(ImportJob.class); - - // Job should fail during expand(), so we don't even need to start the pipeline. - job.expand(); - - PCollection writtenToServing = - PCollectionList - .of(FeatureServingFactoryService.get(MockServingFactory.class).getWrite() - .getInputs()) - .apply("flatten serving input", Flatten.pCollections()); - - PCollection writtenToWarehouse = - PCollectionList.of( - FeatureWarehouseFactoryService.get(MockWarehouseFactory.class).getWrite() - .getInputs()) - .apply("flatten warehouse input", Flatten.pCollections()); - - PAssert.that(writtenToServing).satisfies(hasCount(1)); - PAssert.that(writtenToWarehouse).satisfies(hasCount(1)); - - testPipeline.run(); - } - - @Test - public void testImportWithErrors() throws IOException { - ImportSpec importSpec = - ProtoUtil.decodeProtoYaml( - "---\n" - + "type: file.csv\n" - + "sourceOptions:\n" - + " # path: to be overwritten in tests\n" - + "entities:\n" - + " - testEntity\n" - + "schema:\n" - + " entityIdColumn: id\n" - + " timestampValue: 2018-09-25T00:00:00.000Z\n" - + " fields:\n" - + " - name: id\n" - + " - featureId: testEntity.testString\n" - + " - featureId: testEntity.testInt32\n" - + "\n", - ImportSpec.getDefaultInstance()); - - File csvFile = folder.newFile("data.csv"); - - // Note the string and integer features are in the wrong positions for the import spec. 
- Files.asCharSink(csvFile, Charsets.UTF_8).write("1,101,a\n2,202,b\n3,303,c\n"); - - ImportJobPipelineOptions options = initOptions(); - - Injector injector = - Guice.createInjector( - new ImportJobModule(options, getImportJobSpecs(importSpec, csvFile.toString())), - new TestPipelineModule(testPipeline)); - - ImportJob job = injector.getInstance(ImportJob.class); - - injector.getInstance(ImportJob.class); - job.expand(); - - PCollection writtenToServing = - PCollectionList - .of(FeatureServingFactoryService.get(MockServingFactory.class).getWrite() - .getInputs()) - .apply("flatten serving input", Flatten.pCollections()); - - PCollection writtenToErrors = - PCollectionList - .of(FeatureErrorsFactoryService.get(MockFeatureErrorsFactory.class).getWrite() - .getInputs()) - .apply("flatten errors input", Flatten.pCollections()); - - PAssert.that(writtenToErrors) - .satisfies( - (errors) -> { - int i = 0; - for (FeatureRowExtended row : errors) { - assertEquals( - row.getLastAttempt().getError().getCause(), - "feast.ingestion.exceptions.TypeConversionException"); - i += 1; - } - assertEquals(i, 3); - return null; - }); - - PAssert.that(writtenToServing).satisfies(hasCount(0)); - testPipeline.run(); - } - - - @Test(expected = IllegalArgumentException.class) - public void testImportWithUnsupportedWarehouse() throws IOException { - ImportSpec importSpec = - ProtoUtil.decodeProtoYaml( - "---\n" - + "type: file.csv\n" - + "sourceOptions:\n" - + " # path: to be overwritten in tests\n" - + "entities:\n" - + " - testEntity\n" - + "schema:\n" - + " entityIdColumn: id\n" - + " timestampValue: 2018-09-25T00:00:00.000Z\n" - + " fields:\n" - + " - name: id\n" - + " - featureId: testEntity.testInt64NoWarehouse\n" - + " - featureId: testEntity.testStringNoWarehouse\n" - + "\n", - ImportSpec.getDefaultInstance()); - - File csvFile = folder.newFile("data.csv"); - - // Note the string and integer features are in the wrong positions for the import spec. 
- Files.asCharSink(csvFile, Charsets.UTF_8).write("1,101,a\n2,202,b\n3,303,c\n"); - - ImportJobPipelineOptions options = initOptions(); - - ImportJobSpecs importJobSpecs = getImportJobSpecs(importSpec, csvFile.toString()).toBuilder() - .setWarehouseStorageSpec(StorageSpec.newBuilder().setId("WAREHOUSE").setType("unknown")) - .build(); - - Injector injector = - Guice.createInjector( - new ImportJobModule(options, importJobSpecs), - new TestPipelineModule(testPipeline)); - - ImportJob job = injector.getInstance(ImportJob.class); - - injector.getInstance(ImportJob.class); - job.expand(); - } - - - @Test - public void testImportWithoutWarehouseStoreSetByFeature() throws IOException { - ImportSpec importSpec = - ProtoUtil.decodeProtoYaml( - "---\n" - + "type: file.csv\n" - + "sourceOptions:\n" - + " # path: to be overwritten in tests\n" - + "entities:\n" - + " - testEntity\n" - + "schema:\n" - + " entityIdColumn: id\n" - + " timestampValue: 2018-09-25T00:00:00.000Z\n" - + " fields:\n" - + " - name: id\n" - + " - featureId: testEntity.testInt64\n" - + " - featureId: testEntity.testString\n" - + "\n", - ImportSpec.getDefaultInstance()); - - File csvFile = folder.newFile("data.csv"); - - Files.asCharSink(csvFile, Charsets.UTF_8).write("1,101,a\n2,202,b\n3,303,c\n"); - - ImportJobPipelineOptions options = initOptions(); - - ImportJobSpecs importJobSpecs = getImportJobSpecs(importSpec, csvFile.toString()).toBuilder() - .clearWarehouseStorageSpec().build(); - Injector injector = - Guice.createInjector( - new ImportJobModule(options, importJobSpecs), - new TestPipelineModule(testPipeline)); - - ImportJob job = injector.getInstance(ImportJob.class); - - injector.getInstance(ImportJob.class); - job.expand(); - - PCollection writtenToServing = - PCollectionList - .of(FeatureServingFactoryService.get(MockServingFactory.class).getWrite() - .getInputs()) - .apply("flatten serving input", Flatten.pCollections()); - - PCollection writtenToWarehouse = - PCollectionList - .of(FeatureWarehouseFactoryService.get(MockWarehouseFactory.class).getWrite() - .getInputs()) - .apply("flatten warehouse input", Flatten.pCollections()); - - PCollection writtenToErrors = - PCollectionList - .of(FeatureErrorsFactoryService.get(MockFeatureErrorsFactory.class).getWrite() - .getInputs()) - .apply("flatten errors input", Flatten.pCollections()); - - PAssert.that(writtenToErrors).satisfies(hasCount(0)); - PAssert.that(writtenToServing).satisfies(hasCount(3)); - PAssert.that(writtenToWarehouse).satisfies(hasCount(0)); - testPipeline.run(); - } -} diff --git a/ingestion/src/test/java/feast/ingestion/ImportJobTest.java b/ingestion/src/test/java/feast/ingestion/ImportJobTest.java new file mode 100644 index 00000000000..5bf9a983779 --- /dev/null +++ b/ingestion/src/test/java/feast/ingestion/ImportJobTest.java @@ -0,0 +1,160 @@ +package feast.ingestion; + +import com.google.common.io.Files; +import com.google.protobuf.InvalidProtocolBufferException; +import com.google.protobuf.util.JsonFormat; +import feast.core.FeatureSetProto.EntitySpec; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.FeatureSetProto.FeatureSpec; +import feast.core.SourceProto.KafkaSourceConfig; +import feast.core.SourceProto.Source; +import feast.core.SourceProto.SourceType; +import feast.core.StoreProto.Store; +import feast.core.StoreProto.Store.RedisConfig; +import feast.core.StoreProto.Store.StoreType; +import feast.core.StoreProto.Store.Subscription; +import feast.ingestion.options.ImportOptions; +import feast.storage.RedisProto.RedisKey; 
+import feast.test.TestUtil; +import feast.test.TestUtil.LocalKafka; +import feast.test.TestUtil.LocalRedis; +import feast.types.FeatureRowProto.FeatureRow; +import feast.types.ValueProto.ValueType.Enum; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.IntStream; +import org.apache.beam.sdk.PipelineResult; +import org.apache.beam.sdk.PipelineResult.State; +import org.apache.beam.sdk.options.PipelineOptionsFactory; +import org.apache.kafka.common.serialization.ByteArraySerializer; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import redis.clients.jedis.Jedis; + +public class ImportJobTest { + + private static final Logger LOGGER = LoggerFactory.getLogger(ImportJobTest.class.getName()); + + private static final String KAFKA_HOST = "localhost"; + private static final int KAFKA_PORT = 19092; + private static final String KAFKA_BOOTSTRAP_SERVERS = KAFKA_HOST + ":" + KAFKA_PORT; + private static final short KAFKA_REPLICATION_FACTOR = 1; + private static final String KAFKA_TOPIC = "topic_1"; + private static final long KAFKA_PUBLISH_TIMEOUT_SEC = 10; + + @SuppressWarnings("UnstableApiUsage") + private static final String ZOOKEEPER_DATA_DIR = Files.createTempDir().getAbsolutePath(); + private static final String ZOOKEEPER_HOST = "localhost"; + private static final int ZOOKEEPER_PORT = 2182; + + private static final String REDIS_HOST = "localhost"; + private static final int REDIS_PORT = 6380; + + // Expected time taken for the import job to be ready to receive Feature Row input + private static final int IMPORT_JOB_READY_DURATION_SEC = 5; + // Expected time taken for the import job to finish writing to Store + private static final int IMPORT_JOB_RUN_DURATION_SEC = 30; + + @BeforeClass + public static void setup() throws IOException, InterruptedException { + LocalKafka.start(KAFKA_HOST, KAFKA_PORT, KAFKA_REPLICATION_FACTOR, true, ZOOKEEPER_HOST, + ZOOKEEPER_PORT, ZOOKEEPER_DATA_DIR); + LocalRedis.start(REDIS_PORT); + } + + @AfterClass + public static void tearDown() { + LocalRedis.stop(); + LocalKafka.stop(); + } + + @Test + public void runPipeline_ShouldWriteToRedisCorrectlyGivenValidSpecAndFeatureRow() + throws IOException, InterruptedException { + FeatureSetSpec spec = + FeatureSetSpec.newBuilder().setName("feature_set").setVersion(3) + .addEntities(EntitySpec.newBuilder() + .setName("entity_id_primary").setValueType(Enum.INT32).build()) + .addEntities(EntitySpec.newBuilder() + .setName("entity_id_secondary").setValueType(Enum.STRING).build()) + .addFeatures(FeatureSpec.newBuilder() + .setName("feature_1").setValueType(Enum.STRING_LIST).build()) + .addFeatures(FeatureSpec.newBuilder() + .setName("feature_2").setValueType(Enum.STRING).build()) + .addFeatures(FeatureSpec.newBuilder() + .setName("feature_3").setValueType(Enum.INT64).build()) + .setSource(Source.newBuilder() + .setType(SourceType.KAFKA).setKafkaSourceConfig( + KafkaSourceConfig.newBuilder() + .setBootstrapServers(KAFKA_HOST + ":" + KAFKA_PORT) + .setTopic(KAFKA_TOPIC).build()) + .build()) + .build(); + + Store redis = + Store.newBuilder().setName(StoreType.REDIS.toString()).setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder() + .setHost(REDIS_HOST).setPort(REDIS_PORT).build()) + 
.addSubscriptions(Subscription.newBuilder() + .setName(spec.getName()).setVersion(String.valueOf(spec.getVersion())).build()) + .build(); + + ImportOptions options = PipelineOptionsFactory.create().as(ImportOptions.class); + options.setFeatureSetSpecJson( + Collections.singletonList( + JsonFormat.printer().omittingInsignificantWhitespace().print(spec))); + options.setStoreJson( + Collections.singletonList( + JsonFormat.printer().omittingInsignificantWhitespace().print(redis))); + options.setProject(""); + options.setBlockOnRun(false); + + int inputSize = 128; + List input = new ArrayList<>(); + Map expected = new HashMap<>(); + + LOGGER.info("Generating test data ..."); + IntStream.range(0, inputSize).forEach(i -> { + FeatureRow randomRow = TestUtil.createRandomFeatureRow(spec); + RedisKey redisKey = TestUtil.createRedisKey(spec, randomRow); + input.add(randomRow); + expected.put(redisKey, randomRow); + }); + + LOGGER.info("Starting Import Job with the following options: {}", options.toString()); + PipelineResult pipelineResult = ImportJob.runPipeline(options); + Thread.sleep(Duration.ofSeconds(IMPORT_JOB_READY_DURATION_SEC).toMillis()); + Assert.assertEquals(pipelineResult.getState(), State.RUNNING); + + LOGGER.info("Publishing {} Feature Row messages to Kafka ...", input.size()); + TestUtil.publishFeatureRowsToKafka(KAFKA_BOOTSTRAP_SERVERS, KAFKA_TOPIC, input, + ByteArraySerializer.class, KAFKA_PUBLISH_TIMEOUT_SEC); + Thread.sleep(Duration.ofSeconds(IMPORT_JOB_RUN_DURATION_SEC).toMillis()); + + LOGGER.info("Validating the actual values written to Redis ..."); + Jedis jedis = new Jedis(REDIS_HOST, REDIS_PORT); + expected.forEach((key, expectedValue) -> { + byte[] actualByteValue = jedis.get(key.toByteArray()); + Assert.assertNotNull("Key not found in Redis: " + key, actualByteValue); + FeatureRow actualValue = null; + try { + actualValue = FeatureRow.parseFrom(actualByteValue); + } catch (InvalidProtocolBufferException e) { + Assert.fail(String + .format("Actual Redis value cannot be parsed as FeatureRow, key: %s, value :%s", + key, new String(actualByteValue, StandardCharsets.UTF_8))); + } + Assert.assertEquals(expectedValue, actualValue); + }); + } +} diff --git a/ingestion/src/test/java/feast/ingestion/boot/TestPipelineModule.java b/ingestion/src/test/java/feast/ingestion/boot/TestPipelineModule.java deleted file mode 100644 index 1800fa27173..00000000000 --- a/ingestion/src/test/java/feast/ingestion/boot/TestPipelineModule.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
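TestUtil.publishFeatureRowsToKafka, used by the new ImportJobTest above, is not part of this diff. As a rough idea of what such a helper can look like with the plain Kafka producer API: the method shape below is an assumption, and only the bootstrap servers, topic, ByteArraySerializer and timeout mirror the call in the test.

import feast.types.FeatureRowProto.FeatureRow;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;

// Hypothetical equivalent of TestUtil.publishFeatureRowsToKafka: serializes each
// FeatureRow to protobuf bytes and blocks until the broker acknowledges it.
static void publishFeatureRows(String bootstrapServers, String topic,
    List<FeatureRow> rows, long timeoutSec) throws Exception {
  Properties props = new Properties();
  props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
  props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
  props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
  try (KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>(props)) {
    for (FeatureRow row : rows) {
      producer.send(new ProducerRecord<>(topic, row.toByteArray()))
          .get(timeoutSec, TimeUnit.SECONDS); // surface publish failures in the test
    }
  }
}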
- * - */ - -package feast.ingestion.boot; - -import com.google.inject.AbstractModule; -import lombok.AllArgsConstructor; -import org.apache.beam.sdk.Pipeline; -import org.apache.beam.sdk.testing.TestPipeline; - -@AllArgsConstructor -public class TestPipelineModule extends AbstractModule { - private TestPipeline testPipeline; - - @Override - protected void configure() { - bind(Pipeline.class).toInstance(testPipeline); - } -} diff --git a/ingestion/src/test/java/feast/ingestion/config/ImportJobSpecsSupplierTest.java b/ingestion/src/test/java/feast/ingestion/config/ImportJobSpecsSupplierTest.java deleted file mode 100644 index 7314c60b72f..00000000000 --- a/ingestion/src/test/java/feast/ingestion/config/ImportJobSpecsSupplierTest.java +++ /dev/null @@ -1,152 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.ingestion.config; - -import static org.junit.Assert.assertEquals; - -import com.google.protobuf.util.JsonFormat; -import feast.ingestion.model.Specs; -import feast.ingestion.util.DateUtil; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.ImportJobSpecsProto.ImportJobSpecs; -import feast.specs.ImportSpecProto.Field; -import feast.specs.ImportSpecProto.ImportSpec; -import feast.specs.ImportSpecProto.Schema; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.types.ValueProto.ValueType.Enum; -import java.io.File; -import java.io.IOException; -import java.io.PrintWriter; -import java.nio.file.Files; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; - -public class ImportJobSpecsSupplierTest { - - @Rule - public TemporaryFolder temporaryFolder = new TemporaryFolder(); - String importSpecYaml = - "---\n" - + "servingStorageSpec:\n" - + " id: TEST_SERVING\n" - + " type: serving.mock\n" - + " options: {}\n" - + "warehouseStorageSpec:\n" - + " id: TEST_WAREHOUSE\n" - + " type: warehouse.mock\n" - + " options: {}\n" - + "errorsStorageSpec:\n" - + " id: ERRORS\n" - + " type: stdout\n" - + " options: {}\n" - + "entitySpecs:\n" - + " - name: testEntity\n" - + " description: This is a test entity\n" - + " tags: []\n" - + "featureSpecs:\n" - + " - id: testEntity.testInt64\n" - + " entity: testEntity\n" - + " name: testInt64\n" - + " owner: feast@example.com\n" - + " description: This is test feature of type integer\n" - + " uri: https://example.com/\n" - + " valueType: INT64\n" - + " tags: []\n" - + " options: {}\n" - + "importSpec:\n" - + " type: file.csv\n" - + " sourceOptions:\n" - + " path: data.csv\n" - + " entities:\n" - + " - driver\n" - + " schema:\n" - + " entityIdColumn: driver_id\n" - + " timestampValue: 2018-09-25T00:00:00.000Z\n" - + " fields:\n" - + " - name: timestamp\n" - + " - name: driver_id\n" - + " - name: trips_completed\n" - + " featureId: driver.trips_completed\n" - + "\n"; - - @Test - public void testSupplierImportSpecYamlFile() throws IOException { - File yamlFile = 
temporaryFolder.newFile("importJobSpecs.yaml"); - try (PrintWriter printWriter = new PrintWriter(Files.newOutputStream(yamlFile.toPath()))) { - printWriter.print(importSpecYaml); - } - - ImportJobSpecs importJobSpecs = new ImportJobSpecsSupplier(yamlFile.getParent()).get(); - Specs specs = new Specs("", importJobSpecs); - System.out.println( - JsonFormat.printer().omittingInsignificantWhitespace().print(importJobSpecs)); - assertEquals( - ImportSpec.newBuilder() - .setType("file.csv") - .putSourceOptions("path", "data.csv") - .addEntities("driver") - .setSchema( - Schema.newBuilder() - .addFields(Field.newBuilder().setName("timestamp")) - .addFields(Field.newBuilder().setName("driver_id")) - .addFields( - Field.newBuilder() - .setName("trips_completed") - .setFeatureId("driver.trips_completed")) - .setEntityIdColumn("driver_id") - .setTimestampValue(DateUtil.toTimestamp("2018-09-25T00:00:00.000Z"))) - .build(), - importJobSpecs.getImportSpec()); - - assertEquals(StorageSpec.newBuilder() - .setId("TEST_SERVING") - .setType("serving.mock") - .build(), importJobSpecs.getServingStorageSpec()); - - assertEquals(StorageSpec.newBuilder() - .setId("TEST_WAREHOUSE") - .setType("warehouse.mock") - .build(), importJobSpecs.getWarehouseStorageSpec()); - - assertEquals(StorageSpec.newBuilder() - .setId("ERRORS") - .setType("stdout") - .build(), importJobSpecs.getErrorsStorageSpec()); - - assertEquals( - EntitySpec.newBuilder() - .setName("testEntity") - .setDescription("This is a test entity") - .build(), - specs.getEntitySpec("testEntity")); - - assertEquals( - FeatureSpec.newBuilder() - .setId("testEntity.testInt64") - .setEntity("testEntity") - .setName("testInt64") - .setOwner("feast@example.com") - .setUri("https://example.com/") - .setValueType(Enum.INT64) - .setDescription("This is test feature of type integer") - .build(), - specs.getFeatureSpec("testEntity.testInt64")); - } -} diff --git a/ingestion/src/test/java/feast/ingestion/model/FeaturesTest.java b/ingestion/src/test/java/feast/ingestion/model/FeaturesTest.java deleted file mode 100644 index d57a16bfaa0..00000000000 --- a/ingestion/src/test/java/feast/ingestion/model/FeaturesTest.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
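The deleted import-job tests above build their ImportSpec instances with ProtoUtil.decodeProtoYaml, and ImportJobSpecsSupplier reads the same YAML format from a workspace file; neither implementation appears in this diff. One common way to get from YAML to a protobuf message, sketched under the assumption that Jackson's YAML module and protobuf-java-util are available (the project's own ProtoUtil may differ), is essentially the reverse of the deleted TestHelper.writeYaml:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import com.google.protobuf.Message;
import com.google.protobuf.util.JsonFormat;
import java.io.IOException;

// Illustrative YAML -> JSON -> proto round trip; not the project's actual utility.
@SuppressWarnings("unchecked")
static <T extends Message> T decodeProtoYaml(String yaml, T defaultInstance) throws IOException {
  Object tree = new ObjectMapper(new YAMLFactory()).readValue(yaml, Object.class);
  String json = new ObjectMapper().writeValueAsString(tree);
  Message.Builder builder = defaultInstance.newBuilderForType();
  JsonFormat.parser().merge(json, builder);
  return (T) builder.build();
}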
- * - */ - -package feast.ingestion.model; - -import feast.types.FeatureProto.Feature; -import feast.types.ValueProto.Value; -import org.junit.Assert; -import org.junit.Test; - - -public class FeaturesTest { - - @Test - public void testFeatureOf() { - Value value = Value.newBuilder().setInt32Val(123).build(); - Feature feature = Features.of("a.feature.id", value); - Assert.assertEquals("a.feature.id", feature.getId()); - } -} \ No newline at end of file diff --git a/ingestion/src/test/java/feast/ingestion/model/SpecsTest.java b/ingestion/src/test/java/feast/ingestion/model/SpecsTest.java deleted file mode 100644 index fe402877ab0..00000000000 --- a/ingestion/src/test/java/feast/ingestion/model/SpecsTest.java +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.ingestion.model; - -import static junit.framework.TestCase.assertTrue; -import static org.junit.Assert.assertEquals; - -import com.google.common.io.Resources; -import feast.ingestion.config.ImportJobSpecsSupplier; -import feast.specs.ImportJobSpecsProto.ImportJobSpecs; -import feast.specs.ImportSpecProto.Field; -import feast.specs.ImportSpecProto.ImportSpec; -import feast.specs.ImportSpecProto.Schema; -import java.nio.file.Path; -import java.nio.file.Paths; -import org.junit.Before; -import org.junit.Test; - -public class SpecsTest { - - ImportJobSpecs importJobSpecs; - - private Field.Builder newField(String featureId) { - return Field.newBuilder().setFeatureId(featureId); - } - - @Before - public void before() { - Path path = Paths.get(Resources.getResource("specs/").getPath()); - importJobSpecs = new ImportJobSpecsSupplier(path.toString()).get(); - } - - @Test - public void testSingleFeatureAndEntity() { - ImportJobSpecs importJobSpecs = this.importJobSpecs.toBuilder() - .setImportSpec(ImportSpec.newBuilder() - .addEntities("testEntity") - .setSchema(Schema.newBuilder().addFields(newField("testEntity.testInt32"))) - ).build(); - - Specs specs = Specs.of("testjob", importJobSpecs); - specs.validate(); - - assertEquals("testjob", specs.getJobName()); - assertEquals(importJobSpecs.getImportSpec(), specs.getImportSpec()); - - assertEquals(1, specs.getEntitySpecs().size()); - assertTrue(specs.getEntitySpecs().containsKey("testEntity")); - - assertEquals(1, specs.getFeatureSpecs().size()); - assertTrue(specs.getFeatureSpecs().containsKey("testEntity.testInt32")); - - assertTrue(specs.getServingStorageSpec().getId().equals("TEST_SERVING")); - } - - @Test(expected = IllegalArgumentException.class) - public void testErrorOnUnknownEntity() { - ImportJobSpecs importJobSpecs = this.importJobSpecs.toBuilder() - .setImportSpec(ImportSpec.newBuilder() - .addEntities("testEntity") - .setSchema(Schema.newBuilder().addFields(newField("testEntity.testInt32"))) - ).build(); - - Specs specs = Specs.of("testjob", importJobSpecs); - specs.validate(); - - specs.getEntitySpec("unknown"); - } - - @Test(expected = IllegalArgumentException.class) - public 
void testErrorOnUnknownFeature() { - ImportJobSpecs importJobSpecs = this.importJobSpecs.toBuilder() - .setImportSpec(ImportSpec.newBuilder() - .addEntities("testEntity") - .setSchema(Schema.newBuilder().addFields(newField("testEntity.testInt32"))) - ).build(); - - Specs specs = Specs.of("testjob", importJobSpecs); - specs.validate(); - - specs.getFeatureSpec("unknown"); - } - - @Test - public void testGetFeatureSpec() { - ImportJobSpecs importJobSpecs = this.importJobSpecs.toBuilder() - .setImportSpec(ImportSpec.newBuilder() - .addEntities("testEntity") - .setSchema(Schema.newBuilder().addFields(newField("testEntity.testInt32"))) - ).build(); - - Specs specs = Specs.of("testjob", importJobSpecs); - specs.validate(); - - assertEquals( - "testEntity.testInt32", specs.getFeatureSpec("testEntity.testInt32").getId()); - } - - @Test - public void testGetEntitySpec() { - ImportJobSpecs importJobSpecs = this.importJobSpecs.toBuilder() - .setImportSpec(ImportSpec.newBuilder() - .addEntities("testEntity") - .setSchema(Schema.newBuilder().addFields(newField("testEntity.testInt32"))) - ).build(); - - Specs specs = Specs.of("testjob", importJobSpecs); - specs.validate(); - - assertEquals("testEntity", specs.getEntitySpec("testEntity").getName()); - } - - @Test - public void testGetStorageSpec() { - ImportJobSpecs importJobSpecs = this.importJobSpecs.toBuilder() - .setImportSpec(ImportSpec.newBuilder() - .addEntities("testEntity") - .setSchema(Schema.newBuilder().addFields(newField("testEntity.testInt32"))) - ).build(); - - Specs specs = Specs.of("testjob", importJobSpecs); - specs.validate(); - - assertEquals(specs.getWarehouseStorageSpec().getId(), "TEST_WAREHOUSE"); - assertEquals(specs.getServingStorageSpec().getId(), "TEST_SERVING"); - } - - @Test(expected = IllegalArgumentException.class) - public void testFeatureSpecReferencesUnknownEntity() { - ImportJobSpecs importJobSpecs = this.importJobSpecs.toBuilder() - .setImportSpec(ImportSpec.newBuilder() - .addEntities("totally_different_entity") - .setSchema(Schema.newBuilder().addFields(newField("testEntity.testInt32"))) - ).build(); - - Specs specs = Specs.of("testjob", importJobSpecs); - specs.validate(); - } -} diff --git a/ingestion/src/test/java/feast/ingestion/model/ValuesTest.java b/ingestion/src/test/java/feast/ingestion/model/ValuesTest.java deleted file mode 100644 index 73a939422cc..00000000000 --- a/ingestion/src/test/java/feast/ingestion/model/ValuesTest.java +++ /dev/null @@ -1,323 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.model; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import com.google.common.collect.ImmutableMap; -import feast.ingestion.util.DateUtil; -import com.google.protobuf.ByteString; -import com.google.protobuf.Timestamp; -import feast.types.ValueProto.Value; -import feast.types.ValueProto.ValueType.Enum; -import java.util.Base64; -import java.util.Map; -import java.util.function.Function; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.junit.Assert; -import org.junit.Test; - -public class ValuesTest { - static class ValueTest { - Value input; - Value expectedValue; - Class expectedThrowable; - - private ValueTest() {} - - public static ValueTest of(Value input, Value expected) { - ValueTest test = new ValueTest(); - test.input = input; - test.expectedValue = expected; - return test; - } - - public static ValueTest of(Value input, Class expected) { - ValueTest test = new ValueTest(); - test.input = input; - test.expectedThrowable = expected; - return test; - } - - void apply(Function func) { - try { - Value output = func.apply(input); - if (expectedThrowable != null) { - fail("expected error"); - } - assertEquals(expectedValue, output); - } catch (Throwable e) { - if (expectedThrowable == null || !expectedThrowable.isInstance(e)) { - throw e; - } - } - } - } - - @Test - public void testValuesOfInt32() { - assertEquals(123, Values.ofInt32(123).getInt32Val()); - } - - @Test - public void testValuesOfInt64() { - assertEquals(123, Values.ofInt64(123).getInt64Val()); - } - - @Test - public void testValuesOfString() { - assertEquals("123", Values.ofString("123").getStringVal()); - } - - @Test - public void testValuesOfBytes() { - ByteString bytes = ByteString.copyFromUtf8("123"); - Assert.assertArrayEquals( - bytes.toByteArray(), Values.ofBytes(bytes.toByteArray()).getBytesVal().toByteArray()); - } - - @Test - public void testValuesOfByteString() { - ByteString bytes = ByteString.copyFromUtf8("123"); - assertEquals(bytes, Values.ofBytes(bytes).getBytesVal()); - } - - @Test - public void testValuesOfTimestampDateTime() { - DateTime dateTime = DateTime.now().withZone(DateTimeZone.UTC); - Assert.assertEquals(dateTime, DateUtil.toDateTime(Values.ofTimestamp(dateTime).getTimestampVal())); - } - - @Test - public void testValuesOfTimestamp() { - Timestamp timestamp = DateUtil.toTimestamp(DateTime.now()); - assertEquals(timestamp, Values.ofTimestamp(timestamp).getTimestampVal()); - } - - @Test - public void testValuesOfBool() { - assertTrue(Values.ofBool(true).getBoolVal()); - assertFalse(Values.ofBool(false).getBoolVal()); - } - - @Test - public void testValuesOfDouble() { - assertEquals(Math.PI, Values.ofDouble(Math.PI).getDoubleVal(), 0.0); - } - - @Test - public void testValuesOfFloat() { - float val = Double.valueOf(Math.PI).floatValue(); - assertEquals(val, Values.ofFloat(val).getFloatVal(), 0.0); - } - - @Test - public void testToValueTypeFromValue() { - assertEquals(Enum.STRING, Values.toValueType(Values.ofString("asdf"))); - assertEquals(Enum.INT64, Values.toValueType(Values.ofInt64(1234))); - assertEquals(Enum.INT32, Values.toValueType(Values.ofInt32(1234))); - assertEquals(Enum.TIMESTAMP, Values.toValueType(Values.ofTimestamp(DateTime.now()))); - assertEquals(Enum.BOOL, Values.toValueType(Values.ofBool(false))); - assertEquals(Enum.BYTES, 
Values.toValueType(Values.ofBytes(ByteString.copyFromUtf8("abcd")))); - assertEquals(Enum.DOUBLE, Values.toValueType(Values.ofDouble(Math.PI))); - assertEquals(Enum.FLOAT, Values.toValueType(Values.ofFloat(1.234F))); - } - - @Test - public void testAsType() {} - - @Test - public void testAsString() { - DateTime datetime = DateTime.now().withZone(DateTimeZone.UTC); - ByteString bytes = ByteString.copyFromUtf8("asdfasdfasdf"); - Map assertions = - ImmutableMap.builder() - .put(Values.ofString("asdf"), "asdf") - .put(Values.ofBool(true), "true") - .put(Values.ofFloat(1.234F), "1.234") - .put(Values.ofDouble(Math.PI), String.valueOf(Math.PI)) - .put(Values.ofInt32(Integer.MAX_VALUE), String.valueOf(Integer.MAX_VALUE)) - .put(Values.ofInt64(Long.MAX_VALUE), String.valueOf(Long.MAX_VALUE)) - .put(Values.ofTimestamp(datetime), String.valueOf(DateUtil.toString(datetime))) - .put(Values.ofBytes(bytes), Base64.getEncoder().encodeToString(bytes.toByteArray())) - .build(); - for (Value value : assertions.keySet()) { - assertEquals(Values.ofString(assertions.get(value)), Values.asString(value)); - } - } - - @Test - public void testAsTimestamp() { - DateTime datetime = DateTime.now().withZone(DateTimeZone.UTC); - ByteString bytes = ByteString.copyFromUtf8("asdfasdfasdf"); - Function castFunc = Values::asTimestamp; - ValueTest.of(Values.ofString("asdclk"), IllegalArgumentException.class).apply(castFunc); - ValueTest.of( - Values.ofString("2019-01-01T12:01:02.123Z"), - Values.ofTimestamp(DateUtil.toTimestamp("2019-01-01T12:01:02.123Z"))) - .apply(castFunc); - ValueTest.of(Values.ofInt32(Integer.MAX_VALUE), UnsupportedOperationException.class) - .apply(castFunc); - ValueTest.of(Values.ofInt64(Long.MAX_VALUE), UnsupportedOperationException.class) - .apply(castFunc); - ValueTest.of(Values.ofBool(true), UnsupportedOperationException.class).apply(castFunc); - ValueTest.of(Values.ofFloat(1.234F), UnsupportedOperationException.class).apply(castFunc); - ValueTest.of(Values.ofDouble(Math.PI), UnsupportedOperationException.class).apply(castFunc); - ValueTest.of(Values.ofBytes(bytes), UnsupportedOperationException.class).apply(castFunc); - ValueTest.of(Values.ofTimestamp(datetime), Values.ofTimestamp(datetime)).apply(castFunc); - } - - @Test - public void testAsInt64() { - DateTime datetime = DateTime.now().withZone(DateTimeZone.UTC); - ByteString bytes = ByteString.copyFromUtf8("asdfasdfasdf"); - Function castFunc = Values::asInt64; - ValueTest.of(Values.ofString("asdclk"), NumberFormatException.class).apply(castFunc); - ValueTest.of( - Values.ofString(String.valueOf(Integer.MAX_VALUE)), Values.ofInt64(Integer.MAX_VALUE)) - .apply(castFunc); - ValueTest.of(Values.ofInt32(Integer.MAX_VALUE), Values.ofInt64(Integer.MAX_VALUE)) - .apply(castFunc); - ValueTest.of(Values.ofInt64(Long.MAX_VALUE), Values.ofInt64(Long.MAX_VALUE)).apply(castFunc); - ValueTest.of(Values.ofBool(true), UnsupportedOperationException.class).apply(castFunc); - ValueTest.of(Values.ofFloat(1.234F), UnsupportedOperationException.class).apply(castFunc); - ValueTest.of(Values.ofDouble(Math.PI), UnsupportedOperationException.class).apply(castFunc); - ValueTest.of(Values.ofBytes(bytes), UnsupportedOperationException.class).apply(castFunc); - ValueTest.of(Values.ofTimestamp(datetime), UnsupportedOperationException.class).apply(castFunc); - } - - @Test - public void testAsFloat() { - DateTime datetime = DateTime.now().withZone(DateTimeZone.UTC); - ByteString bytes = ByteString.copyFromUtf8("asdfasdfasdf"); - Function castFunc = Values::asFloat; - 
ValueTest.of(Values.ofString("asdclk"), NumberFormatException.class).apply(castFunc); - ValueTest.of(Values.ofString(String.valueOf(Float.MAX_VALUE)), Values.ofFloat(Float.MAX_VALUE)) - .apply(castFunc); - ValueTest.of(Values.ofInt32(Integer.MAX_VALUE), Values.ofFloat(Integer.MAX_VALUE)) - .apply(castFunc); - ValueTest.of(Values.ofInt64(Long.MAX_VALUE), UnsupportedOperationException.class) - .apply(castFunc); - ValueTest.of(Values.ofBool(true), UnsupportedOperationException.class).apply(castFunc); - ValueTest.of(Values.ofFloat(Float.MAX_VALUE), Values.ofFloat(Float.MAX_VALUE)).apply(castFunc); - ValueTest.of(Values.ofDouble(Math.PI), Values.ofFloat((float)Math.PI)).apply(castFunc); - ValueTest.of(Values.ofBytes(bytes), UnsupportedOperationException.class).apply(castFunc); - ValueTest.of(Values.ofTimestamp(datetime), UnsupportedOperationException.class).apply(castFunc); - } - - @Test - public void testAsBool() { - DateTime datetime = DateTime.now().withZone(DateTimeZone.UTC); - ByteString bytes = ByteString.copyFromUtf8("asdfasdfasdf"); - Function castFunc = Values::asBool; - ValueTest.of(Values.ofString("asdclk"), Values.ofBool(false)).apply(castFunc); - ValueTest.of(Values.ofString("True"), Values.ofBool(true)).apply(castFunc); - ValueTest.of(Values.ofString("true"), Values.ofBool(true)).apply(castFunc); - ValueTest.of(Values.ofInt32(Integer.MAX_VALUE), IllegalArgumentException.class) - .apply(castFunc); - ValueTest.of(Values.ofInt32(0), Values.ofBool(false)) - .apply(castFunc); - ValueTest.of(Values.ofInt32(1), Values.ofBool(true)) - .apply(castFunc); - ValueTest.of(Values.ofInt64(Long.MAX_VALUE), IllegalArgumentException.class) - .apply(castFunc); - - ValueTest.of(Values.ofInt64(0), Values.ofBool(false)) - .apply(castFunc); - ValueTest.of(Values.ofInt64(1), Values.ofBool(true)) - .apply(castFunc); - ValueTest.of(Values.ofBool(true), Values.ofBool(true)).apply(castFunc); - - ValueTest.of(Values.ofFloat(1.234F), IllegalArgumentException.class).apply(castFunc); - ValueTest.of(Values.ofFloat(0), Values.ofBool(false)).apply(castFunc); - ValueTest.of(Values.ofFloat(1), Values.ofBool(true)).apply(castFunc); - - ValueTest.of(Values.ofDouble(Math.PI), IllegalArgumentException.class).apply(castFunc); - ValueTest.of(Values.ofDouble(0), Values.ofBool(false)).apply(castFunc); - ValueTest.of(Values.ofDouble(1), Values.ofBool(true)).apply(castFunc); - - ValueTest.of(Values.ofBytes(bytes), UnsupportedOperationException.class).apply(castFunc); - ValueTest.of(Values.ofTimestamp(datetime), UnsupportedOperationException.class).apply(castFunc); - } - - @Test - public void testAsInt32() { - DateTime datetime = DateTime.now().withZone(DateTimeZone.UTC); - ByteString bytes = ByteString.copyFromUtf8("asdfasdfasdf"); - Function castFunc = Values::asInt32; - ValueTest.of(Values.ofString("asdclk"), NumberFormatException.class).apply(castFunc); - ValueTest.of( - Values.ofString(String.valueOf(Integer.MAX_VALUE)), Values.ofInt32(Integer.MAX_VALUE)) - .apply(castFunc); - ValueTest.of(Values.ofInt32(Integer.MAX_VALUE), Values.ofInt32(Integer.MAX_VALUE)) - .apply(castFunc); - ValueTest.of(Values.ofInt64(Integer.MAX_VALUE), Values.ofInt32(Integer.MAX_VALUE)).apply(castFunc); - ValueTest.of(Values.ofInt64(Integer.MIN_VALUE), Values.ofInt32(Integer.MIN_VALUE)).apply(castFunc); - ValueTest.of(Values.ofInt64(Long.MAX_VALUE), IllegalArgumentException.class); - ValueTest.of(Values.ofInt64(Long.MIN_VALUE), IllegalArgumentException.class); - ValueTest.of(Values.ofBool(true), UnsupportedOperationException.class).apply(castFunc); 
- ValueTest.of(Values.ofFloat(1.234F), UnsupportedOperationException.class).apply(castFunc); - ValueTest.of(Values.ofDouble(Math.PI), UnsupportedOperationException.class).apply(castFunc); - ValueTest.of(Values.ofBytes(bytes), UnsupportedOperationException.class).apply(castFunc); - ValueTest.of(Values.ofTimestamp(datetime), UnsupportedOperationException.class).apply(castFunc); - } - - @Test - public void testAsDouble() { - DateTime datetime = DateTime.now().withZone(DateTimeZone.UTC); - ByteString bytes = ByteString.copyFromUtf8("asdfasdfasdf"); - Function castFunc = Values::asDouble; - ValueTest.of( - Values.ofString(String.valueOf(Double.MAX_VALUE)), Values.ofDouble(Double.MAX_VALUE)) - .apply(castFunc); - - ValueTest.of(Values.ofString("asdclk"), NumberFormatException.class).apply(castFunc); - ValueTest.of(Values.ofInt32(Integer.MAX_VALUE), Values.ofDouble(Integer.MAX_VALUE)) - .apply(castFunc); - ValueTest.of(Values.ofInt64(Long.MAX_VALUE), Values.ofDouble(Long.MAX_VALUE)).apply(castFunc); - ValueTest.of(Values.ofBool(true), UnsupportedOperationException.class).apply(castFunc); - ValueTest.of(Values.ofFloat(Float.MAX_VALUE), Values.ofDouble(Float.MAX_VALUE)).apply(castFunc); - ValueTest.of(Values.ofDouble(Math.PI), Values.ofDouble(Math.PI)).apply(castFunc); - ValueTest.of(Values.ofBytes(bytes), UnsupportedOperationException.class).apply(castFunc); - ValueTest.of(Values.ofTimestamp(datetime), UnsupportedOperationException.class).apply(castFunc); - } - - @Test - public void testAsBytes() { - DateTime datetime = DateTime.now().withZone(DateTimeZone.UTC); - ByteString bytes = ByteString.copyFromUtf8("asdfasdfasdf"); - Function castFunc = Values::asBytes; - ValueTest.of( - Values.ofString(String.valueOf(Double.MAX_VALUE)), UnsupportedOperationException.class) - .apply(castFunc); - ValueTest.of(Values.ofInt32(Integer.MAX_VALUE), UnsupportedOperationException.class) - .apply(castFunc); - ValueTest.of(Values.ofInt64(Long.MAX_VALUE), UnsupportedOperationException.class) - .apply(castFunc); - ValueTest.of(Values.ofBool(true), UnsupportedOperationException.class).apply(castFunc); - ValueTest.of(Values.ofFloat(Float.MAX_VALUE), UnsupportedOperationException.class) - .apply(castFunc); - ValueTest.of(Values.ofDouble(Math.PI), UnsupportedOperationException.class).apply(castFunc); - ValueTest.of(Values.ofBytes(bytes), Values.ofBytes(bytes)); - ValueTest.of(Values.ofTimestamp(datetime), UnsupportedOperationException.class).apply(castFunc); - } -} diff --git a/ingestion/src/test/java/feast/ingestion/options/JobOptionsTest.java b/ingestion/src/test/java/feast/ingestion/options/JobOptionsTest.java deleted file mode 100644 index 59f4a478103..00000000000 --- a/ingestion/src/test/java/feast/ingestion/options/JobOptionsTest.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.options; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import feast.options.OptionsParser; -import java.util.HashMap; -import java.util.Map; -import org.junit.Test; - -public class JobOptionsTest { - - @Test - public void test_shouldParseAll() { - Map map = new HashMap<>(); - map.put("coalesceRows.enabled", "false"); - map.put("coalesceRows.delaySeconds", "123"); - map.put("coalesceRows.timeoutSeconds", "1800"); - map.put("sample.limit", "1234"); - - - JobOptions options = OptionsParser.parse(map, JobOptions.class); - assertEquals(options.getSampleLimit(), 1234L); - assertEquals(options.getCoalesceRowsDelaySeconds(), 123L); - assertFalse(options.isCoalesceRowsEnabled()); - assertEquals(options.getCoalesceRowsTimeoutSeconds(), 1800L); - } - - @Test - public void test_shouldParseEmptyOptions() { - JobOptions options = OptionsParser.parse(new HashMap<>(), JobOptions.class); - assertEquals(options.getSampleLimit(), 0L); - assertEquals(options.getCoalesceRowsDelaySeconds(), 0L); - assertTrue(options.isCoalesceRowsEnabled()); //defaults to true - assertEquals(options.getCoalesceRowsTimeoutSeconds(), 0L); - } - - @Test - public void test_shouldParseSampleLimit() { - Map map = new HashMap<>(); - map.put("sample.limit", "1234"); - JobOptions options = OptionsParser.parse(map, JobOptions.class); - assertEquals(options.getSampleLimit(), 1234L); - } - - @Test - public void test_shouldParseCoalesceRowsDelaySeconds() { - Map map = new HashMap<>(); - map.put("coalesceRows.delaySeconds", "123"); - JobOptions options = OptionsParser.parse(map, JobOptions.class); - assertEquals(options.getCoalesceRowsDelaySeconds(), 123L); - } - - @Test - public void test_shouldParseCoalesceRowsEnabled() { - Map map = new HashMap<>(); - map.put("coalesceRows.enabled", "true"); - JobOptions options = OptionsParser.parse(map, JobOptions.class); - assertTrue(options.isCoalesceRowsEnabled()); - - } - - - @Test - public void test_shouldParseCoalesceRowsTimeoutSeconds() { - Map map = new HashMap<>(); - map.put("coalesceRows.timeoutSeconds", "1800"); - JobOptions options = OptionsParser.parse(map, JobOptions.class); - assertEquals(options.getCoalesceRowsTimeoutSeconds(), 1800L); - } -} \ No newline at end of file diff --git a/ingestion/src/test/java/feast/ingestion/transform/CoalesceFeatureRowsTest.java b/ingestion/src/test/java/feast/ingestion/transform/CoalesceFeatureRowsTest.java deleted file mode 100644 index 4d46e8925a9..00000000000 --- a/ingestion/src/test/java/feast/ingestion/transform/CoalesceFeatureRowsTest.java +++ /dev/null @@ -1,568 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.transform; - -import static feast.NormalizeFeatureRows.normalize; -import static feast.ingestion.transform.CoalesceFeatureRows.toFeatureRow; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertEquals; - -import com.google.common.collect.Lists; -import com.google.protobuf.Timestamp; -import feast.FeastMatchers; -import feast.NormalizeFeatureRows; -import feast.ingestion.model.Features; -import feast.ingestion.model.Values; -import feast.types.FeatureProto.Feature; -import feast.types.FeatureRowProto.FeatureRow; -import feast_ingestion.types.CoalesceAccumProto.CoalesceAccum; -import java.util.List; -import org.apache.beam.sdk.extensions.protobuf.ProtoCoder; -import org.apache.beam.sdk.testing.PAssert; -import org.apache.beam.sdk.testing.TestPipeline; -import org.apache.beam.sdk.testing.TestStream; -import org.apache.beam.sdk.transforms.Count; -import org.apache.beam.sdk.transforms.Create; -import org.apache.beam.sdk.values.PCollection; -import org.joda.time.Duration; -import org.joda.time.Instant; -import org.junit.Rule; -import org.junit.Test; - -public class CoalesceFeatureRowsTest { - - private static final Timestamp DEFAULT_TIMESTAMP = Timestamp.getDefaultInstance(); - private static final FeatureRow DEFAULT_FEATURE_ROW = FeatureRow.getDefaultInstance().toBuilder() - .setEventTimestamp(DEFAULT_TIMESTAMP).build(); - - @Rule - public TestPipeline pipeline = TestPipeline.create(); - - @Test - public void testBatch_withDistictKeys_shouldPassThroughNonIntersectingKeys() { - List rows = Lists.newArrayList( - FeatureRow.newBuilder().setEventTimestamp(Timestamp.getDefaultInstance()).setEntityKey("1") - .build(), - FeatureRow.newBuilder().setEventTimestamp(Timestamp.getDefaultInstance()).setEntityKey("2") - .build()); - - PCollection input = pipeline.apply(Create.of(rows)) - .setCoder(ProtoCoder.of(FeatureRow.class)); - - PCollection output = input.apply(new CoalesceFeatureRows()); - - PAssert.that(output.apply(Count.globally())).containsInAnyOrder(2L); - PAssert.that(output).containsInAnyOrder(rows); - - pipeline.run(); - } - - @Test - public void test_withNoFeaturesSameTimestamp_shouldReturn1() { - List rows = Lists.newArrayList( - FeatureRow.newBuilder().setEventTimestamp(Timestamp.getDefaultInstance()).setEntityKey("1") - .build(), - FeatureRow.newBuilder().setEventTimestamp(Timestamp.getDefaultInstance()).setEntityKey("1") - .build()); - PCollection input = pipeline.apply(Create.of(rows)) - .setCoder(ProtoCoder.of(FeatureRow.class)); - - PCollection output = input.apply(new CoalesceFeatureRows()); - - assertThat(CoalesceFeatureRows.combineFeatureRows(rows), equalTo(rows.get(0))); - PAssert.that(output.apply(Count.globally())).containsInAnyOrder(1L); - PAssert.that(output).containsInAnyOrder(rows.get(0)); - - pipeline.run(); - } - - @Test - public void testBatch_withNoFeaturesDifferentSeconds_shouldReturnLatest() { - List rows1 = Lists.newArrayList( - FeatureRow.newBuilder().setEntityKey("1") - .setEventTimestamp(Timestamp.newBuilder().setSeconds(1)).build(), - FeatureRow.newBuilder().setEntityKey("1") - .setEventTimestamp(Timestamp.newBuilder().setSeconds(2)).build()); - - assertThat(CoalesceFeatureRows.combineFeatureRows(rows1), equalTo(rows1.get(1))); - assertThat(CoalesceFeatureRows.combineFeatureRows(Lists.reverse(rows1)), - equalTo(rows1.get(1))); - } - - - @Test - public void testBatch_withNoFeaturesDifferentNanos_shouldReturnLatest() { - List rows1 = 
Lists.newArrayList( - FeatureRow.newBuilder().setEntityKey("1") - .setEventTimestamp(Timestamp.newBuilder().setNanos(1)).build(), - FeatureRow.newBuilder().setEntityKey("1") - .setEventTimestamp(Timestamp.newBuilder().setNanos(2)).build()); - - assertThat(CoalesceFeatureRows.combineFeatureRows(rows1), equalTo(rows1.get(1))); - assertThat(CoalesceFeatureRows.combineFeatureRows(Lists.reverse(rows1)), - equalTo(rows1.get(1))); - } - - @Test - public void testBatch_shouldMergeFeatures() { - List rows = Lists.newArrayList( - FeatureRow.newBuilder().setEntityKey("1") - .addFeatures(Feature.newBuilder().setId("f1").setValue(Values.ofInt32(1))) - .build(), - FeatureRow.newBuilder().setEntityKey("1") - .addFeatures(Feature.newBuilder().setId("f2").setValue(Values.ofInt32(2))) - .build()); - PCollection input = pipeline.apply(Create.of(rows)) - .setCoder(ProtoCoder.of(FeatureRow.class)); - - PCollection output = input.apply(new CoalesceFeatureRows()); - - PAssert.that(output.apply(Count.globally())).containsInAnyOrder(1L); - PAssert.that(output.apply(new NormalizeFeatureRows())).containsInAnyOrder( - FeatureRow.newBuilder().setEntityKey("1") - .setEventTimestamp(Timestamp.getDefaultInstance()) - .addFeatures(Feature.newBuilder().setId("f1").setValue(Values.ofInt32(1))) - .addFeatures(Feature.newBuilder().setId("f2").setValue(Values.ofInt32(2))) - .build() - ); - - pipeline.run(); - } - - @Test - public void testStream_shouldMergeFeatures() { - List rows = Lists.newArrayList( - FeatureRow.newBuilder().setEntityKey("1") - .setEventTimestamp(DEFAULT_TIMESTAMP) - .addFeatures(Feature.newBuilder().setId("f1").setValue(Values.ofInt32(1))) - .build(), - FeatureRow.newBuilder().setEntityKey("1") - .setEventTimestamp(DEFAULT_TIMESTAMP) - .addFeatures(Feature.newBuilder().setId("f2").setValue(Values.ofInt32(2))) - .build()); - - Instant start = new Instant(); - Duration delay = Duration.standardSeconds(10); - TestStream testStream = TestStream.create(ProtoCoder.of(FeatureRow.class)) - .advanceWatermarkTo(start) - .addElements(rows.get(0)) - .addElements(rows.get(1)) - .advanceWatermarkToInfinity(); - - PCollection input = pipeline.apply(testStream); - PCollection output = input.apply(new CoalesceFeatureRows(delay, Duration.ZERO)); - - PAssert.that(output.apply(Count.globally())).containsInAnyOrder(1L); - PAssert.that(output).containsInAnyOrder( - FeatureRow.newBuilder().setEntityKey("1") - .setEventTimestamp(DEFAULT_TIMESTAMP) - .addFeatures(Feature.newBuilder().setId("f1").setValue(Values.ofInt32(1))) - .addFeatures(Feature.newBuilder().setId("f2").setValue(Values.ofInt32(2))) - .build() - ); - pipeline.run(); - } - - @Test - public void testStream_shouldIncludeRowAddedOnTimerEdge() { - List rows = Lists.newArrayList( - FeatureRow.newBuilder().setEntityKey("1") - .addFeatures(Feature.newBuilder().setId("f1").setValue(Values.ofInt32(1))) - .build(), - FeatureRow.newBuilder().setEntityKey("1") - .addFeatures(Feature.newBuilder().setId("f2").setValue(Values.ofInt32(2))) - .build(), - FeatureRow.newBuilder().setEntityKey("1") - .addFeatures(Feature.newBuilder().setId("f3").setValue(Values.ofInt32(3))) - .build()); - - Instant start = new Instant(); - Duration delay = Duration.standardSeconds(10); - TestStream testStream = TestStream.create(ProtoCoder.of(FeatureRow.class)) - .advanceWatermarkTo(start) - .addElements(rows.get(0)) - .advanceWatermarkTo(start.plus(delay)) - // This row will be included in the same pane because it's exactly - // on the same water mark as the onTimer event - .addElements(rows.get(1)) 
- .advanceWatermarkTo(start.plus(delay).plus(delay).plus(delay)) - .addElements(rows.get(2)) - .advanceWatermarkToInfinity(); - - PCollection input = pipeline.apply(testStream); - PCollection output = input - .apply(new CoalesceFeatureRows(delay, Duration.ZERO)); - - PAssert.that(output).satisfies(FeastMatchers.hasCount(2L)); - PAssert.that(output.apply(new NormalizeFeatureRows())).containsInAnyOrder( - normalize(FeatureRow.newBuilder().setEntityKey("1") - .addFeatures(Feature.newBuilder().setId("f1").setValue(Values.ofInt32(1))) - .addFeatures(Feature.newBuilder().setId("f2").setValue(Values.ofInt32(2))) - .build()), - normalize(FeatureRow.newBuilder().setEntityKey("1") - .addFeatures(Feature.newBuilder().setId("f3").setValue(Values.ofInt32(3))) - .build()) - ); - pipeline.run(); - } - - @Test - public void testStream_shouldMergeFeatures_emittingPanes_overlappingTimers() { - List rows = Lists.newArrayList( - FeatureRow.newBuilder().setEntityKey("1") - .addFeatures(Feature.newBuilder().setId("f1").setValue(Values.ofInt32(1))) - .build(), - FeatureRow.newBuilder().setEntityKey("1") - .addFeatures(Feature.newBuilder().setId("f2").setValue(Values.ofInt32(2))) - .build() - ); - - Instant start = new Instant(); - Duration delay = Duration.standardSeconds(10); - TestStream testStream = TestStream.create(ProtoCoder.of(FeatureRow.class)) - .advanceWatermarkTo(start) - .addElements(rows.get(0)) - .advanceWatermarkTo(start.plus(delay.dividedBy(2))) - // second event before time triggers - .addElements(rows.get(1)) - .advanceWatermarkTo(start.plus(delay)) - .advanceWatermarkToInfinity(); - - PCollection input = pipeline.apply(testStream); - PCollection output = input.apply(new CoalesceFeatureRows(delay, Duration.ZERO)); - - PAssert.that(output).satisfies(FeastMatchers.hasCount(1L)); - PAssert.that(output.apply(new NormalizeFeatureRows())).containsInAnyOrder( - normalize(FeatureRow.newBuilder().setEntityKey("1") - .addFeatures(Feature.newBuilder().setId("f1").setValue(Values.ofInt32(1))) - .addFeatures(Feature.newBuilder().setId("f2").setValue(Values.ofInt32(2))) - ) - ); - pipeline.run(); - } - - @Test - public void testStream_shouldNotSetTimerWhilePending() { - Instant start = new Instant(); - Duration delay = Duration.standardSeconds(10); - TestStream testStream = TestStream.create(ProtoCoder.of(FeatureRow.class)) - .advanceWatermarkTo(start) - .addElements(DEFAULT_FEATURE_ROW) - // this should not reset the timer as the first is still pending. - .advanceWatermarkTo(start.plus(delay.dividedBy(2))) - .addElements(DEFAULT_FEATURE_ROW) - // timer should trigger causing the first output row - .advanceWatermarkTo(start.plus(delay).plus(delay.dividedBy(2))) - .addElements(FeatureRow.getDefaultInstance()) // this should cause a second output row. 
- .advanceWatermarkTo(start.plus(delay).plus(delay)) - .advanceWatermarkToInfinity(); - - PCollection input = pipeline.apply(testStream); - PCollection output = input.apply(new CoalesceFeatureRows(delay, Duration.ZERO)); - - PAssert.that(output).satisfies(FeastMatchers.hasCount(2L)); - PAssert.that(output).containsInAnyOrder(DEFAULT_FEATURE_ROW, DEFAULT_FEATURE_ROW); - pipeline.run(); - } - - @Test - public void testStream_shouldOnlyEmitNewFeaturesInSecondPane() { - Instant start = new Instant(); - Duration delay = Duration.standardSeconds(10); - TestStream testStream = TestStream.create(ProtoCoder.of(FeatureRow.class)) - .advanceWatermarkTo(start) - .addElements( - FeatureRow.newBuilder() - .setEventTimestamp(DEFAULT_TIMESTAMP) - .addFeatures(Features.of("f1", Values.ofString("a"))) - .build()) - // this should emit a row - .advanceWatermarkTo(start.plus(delay).plus(delay)) - .addElements( - FeatureRow.newBuilder() - .setEventTimestamp(DEFAULT_TIMESTAMP) - .addFeatures(Features.of("f2", Values.ofString("b"))) - .build()) - // this should emit a row with f2 but without f1 because it hasn't had an update - .advanceWatermarkToInfinity(); - - PCollection input = pipeline.apply(testStream); - PCollection output = input.apply(new CoalesceFeatureRows(delay, Duration.ZERO)); - - PAssert.that(output).satisfies(FeastMatchers.hasCount(2L)); - PAssert.that(output).containsInAnyOrder( - FeatureRow.newBuilder() - .setEventTimestamp(DEFAULT_TIMESTAMP) - .addFeatures(Features.of("f1", Values.ofString("a"))) - .build(), - FeatureRow.newBuilder() - .setEventTimestamp(DEFAULT_TIMESTAMP) - .addFeatures(Features.of("f2", Values.ofString("b"))) - .build()); - pipeline.run(); - } - - @Test - public void test_combineFeatureRows_shouldCountRows() { - List rows = Lists.newArrayList( - FeatureRow.getDefaultInstance(), - FeatureRow.getDefaultInstance(), - FeatureRow.getDefaultInstance()); - CoalesceAccum accum = CoalesceFeatureRows - .combineFeatureRows(CoalesceAccum.getDefaultInstance(), rows); - assertEquals(3, accum.getCounter()); - } - - @Test - public void test_combineFeatureRows_shouldOverwriteWhenLaterEventTimestampProcessedSecond() { - List rows = Lists.newArrayList( - FeatureRow.newBuilder() - .addFeatures(Features.of("f1", Values.ofString("a"))) - .setEventTimestamp(Timestamp.newBuilder().setSeconds(1)) - .build(), - FeatureRow.newBuilder() - .addFeatures(Features.of("f1", Values.ofString("b"))) - .setEventTimestamp(Timestamp.newBuilder().setSeconds(2)) - .build()); - CoalesceAccum accum = CoalesceFeatureRows - .combineFeatureRows(CoalesceAccum.getDefaultInstance(), rows); - assertEquals(accum.getFeaturesMap().get("f1"), Features.of("f1", Values.ofString("b"))); - } - - @Test - public void test_combineFeatureRows_shouldNotOverwriteWhenEarlierEventTimestampProcessedSecond() { - List rows = Lists.newArrayList( - FeatureRow.newBuilder() - .addFeatures(Features.of("f1", Values.ofString("b"))) - .setEventTimestamp(Timestamp.newBuilder().setSeconds(2)) - .build(), - FeatureRow.newBuilder() - .addFeatures(Features.of("f1", Values.ofString("a"))) - .setEventTimestamp(Timestamp.newBuilder().setSeconds(1)) - .build()); - CoalesceAccum accum = CoalesceFeatureRows - .combineFeatureRows(CoalesceAccum.getDefaultInstance(), rows); - assertEquals(accum.getFeaturesMap().get("f1"), Features.of("f1", Values.ofString("b"))); - } - - @Test - public void test_combineFeatureRows_shouldOverwriteWhenSameEventTimestampProcessedSecond() { - List rows = Lists.newArrayList( - FeatureRow.newBuilder() - 
.addFeatures(Features.of("f1", Values.ofString("a"))) - .setEventTimestamp(Timestamp.newBuilder().setSeconds(2)) - .build(), - FeatureRow.newBuilder() - .addFeatures(Features.of("f1", Values.ofString("b"))) - .setEventTimestamp(Timestamp.newBuilder().setSeconds(2)) - .build() - ); - CoalesceAccum accum = CoalesceFeatureRows - .combineFeatureRows(CoalesceAccum.getDefaultInstance(), rows); - assertEquals(accum.getFeaturesMap().get("f1"), Features.of("f1", Values.ofString("b"))); - } - - @Test - public void test_shouldPickLatestFeatures() { - List rows = Lists.newArrayList( - FeatureRow.newBuilder().setEntityKey("1") - .setEventTimestamp( - Timestamp.newBuilder().setSeconds(1)) // old row with non unique feature - .addFeatures(Feature.newBuilder().setId("f1").setValue(Values.ofInt32(1))) - .build(), - FeatureRow.newBuilder().setEntityKey("1") - .setEventTimestamp(Timestamp.newBuilder().setSeconds(2)) - .addFeatures(Feature.newBuilder().setId("f1").setValue(Values.ofInt32(2))) - .addFeatures(Feature.newBuilder().setId("f2").setValue(Values.ofInt32(2))) - .build(), - FeatureRow.newBuilder().setEntityKey("1") - .setEventTimestamp(Timestamp.newBuilder().setSeconds(1)) // old row with unique feature - .addFeatures(Feature.newBuilder().setId("f3").setValue(Values.ofInt32(3))) - .build()); - - CoalesceAccum accum = CoalesceFeatureRows - .combineFeatureRows(CoalesceAccum.getDefaultInstance(), rows); - assertEquals(3, accum.getCounter()); - assertThat(toFeatureRow(accum, 0), equalTo( - FeatureRow.newBuilder().setEntityKey("1") - .setEventTimestamp(Timestamp.newBuilder().setSeconds(2)) - .addFeatures(Feature.newBuilder().setId("f1").setValue(Values.ofInt32(2))) - .addFeatures(Feature.newBuilder().setId("f2").setValue(Values.ofInt32(2))) - .addFeatures(Feature.newBuilder().setId("f3").setValue(Values.ofInt32(3))) - .build() - )); - } - - @Test - public void testStream_withNoInput() { - TestStream testStream = TestStream.create(ProtoCoder.of(FeatureRow.class)) - .advanceWatermarkToInfinity(); - - PCollection input = pipeline.apply(testStream); - PCollection output = input.apply(new CoalesceFeatureRows()); - - PAssert.that(output).satisfies(FeastMatchers.hasCount(0L)); - pipeline.run(); - } - - @Test - public void testBatch_withNoInput() { - PCollection input = pipeline.apply(Create.empty(ProtoCoder.of(FeatureRow.class))); - PCollection output = input.apply(new CoalesceFeatureRows()); - - PAssert.that(output).satisfies(FeastMatchers.hasCount(0L)); - pipeline.run(); - } - - @Test - public void testStream_withTimeout_shouldRemoveState() { - List rows = Lists.newArrayList( - FeatureRow.newBuilder().setEntityKey("1") - .addFeatures(Feature.newBuilder().setId("f1").setValue(Values.ofInt32(1))) - .build(), - FeatureRow.newBuilder().setEntityKey("1") - .addFeatures(Feature.newBuilder().setId("f2").setValue(Values.ofInt32(2))) - .build() - ); - - Instant start = new Instant(); - Duration delay = Duration.standardSeconds(10); - Duration timeout = Duration.standardMinutes(30); - TestStream testStream = TestStream.create(ProtoCoder.of(FeatureRow.class)) - .addElements(rows.get(0)) - .advanceWatermarkTo(start.plus(timeout)) - // first element should get fired, as the delay water mark is reached before the timeout - // watermark, then state should be cleared when it reaches the timeout watermark. 
- .addElements(rows.get(1)) - .advanceWatermarkToInfinity(); - - PCollection input = pipeline.apply(testStream); - PCollection output = input.apply(new CoalesceFeatureRows(delay, timeout)); - - PAssert.that(output).satisfies(FeastMatchers.hasCount(2L)); - PAssert.that(output.apply(new NormalizeFeatureRows())).containsInAnyOrder( - FeatureRow.newBuilder().setEntityKey("1") - .addFeatures(Feature.newBuilder().setId("f1").setValue(Values.ofInt32(1))) - .setEventTimestamp(Timestamp.getDefaultInstance()) - .build(), - FeatureRow.newBuilder().setEntityKey("1") - .addFeatures(Feature.newBuilder().setId("f2").setValue(Values.ofInt32(2))) - .setEventTimestamp(Timestamp.getDefaultInstance()) - .build() - ); - pipeline.run(); - } - - @Test - public void testStream_withDelayAfterTimeout_shouldProcessBagBeforeClear() { - List rows = Lists.newArrayList( - FeatureRow.newBuilder().setEntityKey("1") - .addFeatures(Feature.newBuilder().setId("f1").setValue(Values.ofInt32(1))) - .build(), - FeatureRow.newBuilder().setEntityKey("1") - .addFeatures(Feature.newBuilder().setId("f2").setValue(Values.ofInt32(2))) - .build() - ); - - Instant start = new Instant(); - Duration delay = Duration.standardMinutes(40); - Duration timeout = Duration.standardMinutes(30); - TestStream testStream = TestStream.create(ProtoCoder.of(FeatureRow.class)) - .addElements(rows.get(0)) - .addElements(rows.get(1)) - // first element should get fired, as the delay water mark is reached before the timeout - // watermark, then state should be cleared when it reaches the timeout watermark. - // If it didn't process the bag before clearing it, we'd get no output events at all. - .advanceWatermarkToInfinity(); - - PCollection input = pipeline.apply(testStream); - PCollection output = input.apply(new CoalesceFeatureRows(delay, timeout)); - - PAssert.that(output).satisfies(FeastMatchers.hasCount(1L)); - PAssert.that(output.apply(new NormalizeFeatureRows())).containsInAnyOrder( - FeatureRow.newBuilder().setEntityKey("1") - .setEventTimestamp(Timestamp.getDefaultInstance()) - .addFeatures(Feature.newBuilder().setId("f1").setValue(Values.ofInt32(1))) - .addFeatures(Feature.newBuilder().setId("f2").setValue(Values.ofInt32(2))) - .build() - ); - pipeline.run(); - } - - @Test - public void test_toFeatureRow_shouldBeNewMarkedFeaturesOnly() { - CoalesceAccum accum = CoalesceAccum.newBuilder() - .putFeatures("f1", Features.of("f1", Values.ofString("a"))) - .putFeatures("f2", Features.of("f2", Values.ofString("b"))) - .putFeatures("f3", Features.of("f3", Values.ofString("c"))) - .putFeatures("f4", Features.of("f4", Values.ofString("d"))) - .putFeatureMarks("f1", 1) - .putFeatureMarks("f2", 1) - .putFeatureMarks("f3", 2) - .putFeatureMarks("f4", 3) - .setCounter(3) - .build(); - - FeatureRow output = normalize(toFeatureRow(accum, 0)); - assertThat(output, - equalTo(FeatureRow.newBuilder() - .addFeatures(Features.of("f1", Values.ofString("a"))) - .addFeatures(Features.of("f2", Values.ofString("b"))) - .addFeatures(Features.of("f3", Values.ofString("c"))) - .addFeatures(Features.of("f4", Values.ofString("d"))) - .setEventTimestamp(DEFAULT_TIMESTAMP) - .build())); - output = normalize(toFeatureRow(accum, 1)); - assertThat(output, - equalTo(FeatureRow.newBuilder() - .addFeatures(Features.of("f3", Values.ofString("c"))) - .addFeatures(Features.of("f4", Values.ofString("d"))) - .setEventTimestamp(DEFAULT_TIMESTAMP) - .build())); - output = normalize(toFeatureRow(accum, 2)); - assertThat(output, - equalTo(FeatureRow.newBuilder() - .addFeatures(Features.of("f4", 
Values.ofString("d"))) - .setEventTimestamp(DEFAULT_TIMESTAMP) - .build())); - output = normalize(toFeatureRow(accum, 3)); - assertThat(output, - equalTo(FeatureRow.newBuilder() - .setEventTimestamp(DEFAULT_TIMESTAMP) - .build())); - } - - @Test(expected = IllegalArgumentException.class) - public void test_toFeatureRow_markTooHigh_shouldThrow() { - CoalesceAccum accum = CoalesceAccum.newBuilder() - .putFeatures("f1", Features.of("f1", Values.ofString("a"))) - .putFeatures("f2", Features.of("f2", Values.ofString("b"))) - .putFeatures("f3", Features.of("f3", Values.ofString("c"))) - .putFeatures("f4", Features.of("f4", Values.ofString("d"))) - .putFeatureMarks("f1", 1) - .putFeatureMarks("f2", 1) - .putFeatureMarks("f3", 2) - .putFeatureMarks("f4", 3) - .setCounter(3) - .build(); - normalize(toFeatureRow(accum, 4)); - // we throw an exception because use case should check that we have new features before trying - // to emit them. - } -} \ No newline at end of file diff --git a/ingestion/src/test/java/feast/ingestion/transform/ErrorsStoreTransformTest.java b/ingestion/src/test/java/feast/ingestion/transform/ErrorsStoreTransformTest.java deleted file mode 100644 index 9458e3ce481..00000000000 --- a/ingestion/src/test/java/feast/ingestion/transform/ErrorsStoreTransformTest.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.transform; - -import static feast.ingestion.model.Errors.toError; -import static feast.store.MockFeatureErrorsFactory.MOCK_ERRORS_STORE_TYPE; -import static feast.store.errors.logging.StderrFeatureErrorsFactory.TYPE_STDERR; -import static feast.store.errors.logging.StdoutFeatureErrorsFactory.TYPE_STDOUT; -import static org.junit.Assert.assertEquals; - -import feast.ingestion.model.Specs; -import feast.ingestion.options.ImportJobPipelineOptions; -import feast.specs.ImportJobSpecsProto.ImportJobSpecs; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.store.MockFeatureErrorsFactory; -import feast.store.errors.FeatureErrorsFactoryService; -import feast.types.FeatureRowExtendedProto.Attempt; -import feast.types.FeatureRowExtendedProto.Error; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.Arrays; -import java.util.List; -import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; -import org.apache.beam.sdk.extensions.protobuf.ProtoCoder; -import org.apache.beam.sdk.options.PipelineOptionsFactory; -import org.apache.beam.sdk.testing.PAssert; -import org.apache.beam.sdk.testing.TestPipeline; -import org.apache.beam.sdk.transforms.Create; -import org.apache.beam.sdk.transforms.Flatten; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PCollectionList; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; - -@Slf4j -public class ErrorsStoreTransformTest { - - @Rule - public TemporaryFolder tempFolder = new TemporaryFolder(); - - @Rule - public TestPipeline pipeline = TestPipeline.create(); - - private ImportJobPipelineOptions options; - private PCollection inputs; - private List errors; - - public Specs getSpecs(String errorsStorageType) { - return new Specs("test", ImportJobSpecs.newBuilder() - .setErrorsStorageSpec(StorageSpec.newBuilder() - .setId("ERRORS") - .setType(errorsStorageType)).build()); - } - - @Before - public void setUp() { - options = PipelineOptionsFactory.create().as(ImportJobPipelineOptions.class); - options.setJobName("test"); - - errors = - Arrays.asList( - errorOf("test", new Exception("err")), errorOf("test", new Exception("err2"))); - inputs = pipeline.apply(Create.of(errors)).setCoder(ProtoCoder.of(FeatureRowExtended.class)); - } - - private FeatureRowExtended errorOf(String transform, Throwable cause) { - Error error = toError(transform, cause); - return FeatureRowExtended.newBuilder() - .setLastAttempt(Attempt.newBuilder().setError(error).build()) - .build(); - } - - @Test - public void shouldWriteToGivenErrorsStore() { - ErrorsStoreTransform transform = new ErrorsStoreTransform( - FeatureErrorsFactoryService.getAll(), - getSpecs(MOCK_ERRORS_STORE_TYPE), options); - transform.expand(inputs); - - MockFeatureErrorsFactory factory = FeatureErrorsFactoryService - .get(MockFeatureErrorsFactory.class); - - PCollection writtenToErrors = - PCollectionList.of(factory.getWrite().getInputs()) - .apply("flatten errors input", Flatten.pCollections()); - - PAssert.that(writtenToErrors).containsInAnyOrder(errors); - pipeline.run(); - } - - @Test - public void logErrorsToStdErr() { - ErrorsStoreTransform transform = new ErrorsStoreTransform( - FeatureErrorsFactoryService.getAll(), - getSpecs(TYPE_STDERR), options); - inputs.apply(transform); - pipeline.run(); - } - - - @Test - public void logErrorsToStdOut() { - 
ErrorsStoreTransform transform = new ErrorsStoreTransform( - FeatureErrorsFactoryService.getAll(), - getSpecs(TYPE_STDOUT), options); - inputs.apply(transform); - pipeline.run(); - } - - @Test - public void logErrorsToWorkspace() throws IOException, InterruptedException { - String tempWorkspace = tempFolder.newFolder().toString(); - options.setWorkspace(tempWorkspace); - ErrorsStoreTransform transform = new ErrorsStoreTransform( - FeatureErrorsFactoryService.getAll(), - getSpecs(""), options); - inputs.apply(transform); - pipeline.run().waitUntilFinish(); - - int lineCount = Files.list(Paths.get(tempWorkspace) - .resolve("errors") // errors workspace dir - .resolve("test") // test entity dir - ).flatMap(path -> { - try { - return Files.readAllLines(path).stream(); - } catch (IOException e) { - throw new RuntimeException(); - } - }).collect(Collectors.toList()).size(); - assertEquals(2, lineCount); - } -} - diff --git a/ingestion/src/test/java/feast/ingestion/transform/fn/ConvertTypesDoFnTest.java b/ingestion/src/test/java/feast/ingestion/transform/fn/ConvertTypesDoFnTest.java deleted file mode 100644 index 7723329d78a..00000000000 --- a/ingestion/src/test/java/feast/ingestion/transform/fn/ConvertTypesDoFnTest.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.ingestion.transform.fn; - - -import com.google.common.collect.Lists; -import feast.ingestion.model.Features; -import feast.ingestion.model.Specs; -import feast.ingestion.model.Values; -import feast.ingestion.util.DateUtil; -import feast.ingestion.values.PFeatureRows; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.ImportJobSpecsProto; -import feast.types.FeatureRowExtendedProto.Attempt; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.ValueProto.ValueType.Enum; -import java.util.List; -import lombok.extern.slf4j.Slf4j; -import org.apache.beam.sdk.testing.PAssert; -import org.apache.beam.sdk.testing.TestPipeline; -import org.apache.beam.sdk.transforms.Create; -import org.junit.Rule; -import org.junit.Test; - -@Slf4j -public class ConvertTypesDoFnTest { - - @Rule - public TestPipeline pipeline = TestPipeline.create(); - - @Test - public void testStringTo() { - FeatureRowExtended row = FeatureRowExtended.newBuilder().setRow( - FeatureRow.newBuilder().addAllFeatures(Lists.newArrayList( - Features.of("STRING_TO_INT32", Values.ofString("123")), - Features.of("STRING_TO_INT64", Values.ofString("123")), - Features.of("STRING_TO_FLOAT", Values.ofString("123")), - Features.of("STRING_TO_DOUBLE", Values.ofString("123")), - Features.of("STRING_TO_STRING", Values.ofString("123")), - Features.of("STRING_TO_BOOL", Values.ofString("true")), - Features.of("STRING_TO_TIMESTAMP", Values.ofString("2019-01-31T19:19:19.123Z")) - ))).build(); - - List featureSpecs = Lists.newArrayList( - FeatureSpec.newBuilder().setId("STRING_TO_INT32").setValueType(Enum.INT32).build(), - FeatureSpec.newBuilder().setId("STRING_TO_INT64").setValueType(Enum.INT64).build(), - FeatureSpec.newBuilder().setId("STRING_TO_FLOAT").setValueType(Enum.FLOAT).build(), - FeatureSpec.newBuilder().setId("STRING_TO_DOUBLE").setValueType(Enum.DOUBLE).build(), - FeatureSpec.newBuilder().setId("STRING_TO_BOOL").setValueType(Enum.BOOL).build(), - FeatureSpec.newBuilder().setId("STRING_TO_STRING").setValueType(Enum.STRING).build(), - FeatureSpec.newBuilder().setId("STRING_TO_TIMESTAMP").setValueType(Enum.TIMESTAMP).build() - ); - PFeatureRows output = PFeatureRows.of(pipeline.apply(Create.of(row))) - .applyDoFn("name", - new ConvertTypesDoFn( - new Specs("", ImportJobSpecsProto.ImportJobSpecs.newBuilder() - .addAllFeatureSpecs(featureSpecs).build()))); - - PAssert.that(output.getErrors()).satisfies(rows -> { - if (rows.iterator().hasNext()) { - log.error(rows.iterator().next().getLastAttempt().getError().toString()); - } - return null; - }); - - PAssert.that(output.getMain()).containsInAnyOrder(FeatureRowExtended.newBuilder().setRow( - FeatureRow.newBuilder().addAllFeatures(Lists.newArrayList( - Features.of("STRING_TO_INT32", Values.ofInt32(123)), - Features.of("STRING_TO_INT64", Values.ofInt64(123L)), - Features.of("STRING_TO_FLOAT", Values.ofFloat(123F)), - Features.of("STRING_TO_DOUBLE", Values.ofDouble(123.0)), - Features.of("STRING_TO_STRING", Values.ofString("123")), - Features.of("STRING_TO_BOOL", Values.ofBool(true)), - Features.of("STRING_TO_TIMESTAMP", - Values.ofTimestamp(DateUtil.toTimestamp("2019-01-31T19:19:19.123Z"))) - ))).setLastAttempt(Attempt.getDefaultInstance()).build()); - pipeline.run(); - } -} \ No newline at end of file diff --git a/ingestion/src/test/java/feast/ingestion/transform/fn/FilterFeatureRowDoFnTest.java b/ingestion/src/test/java/feast/ingestion/transform/fn/FilterFeatureRowDoFnTest.java 
deleted file mode 100644 index e8fcc5818a9..00000000000 --- a/ingestion/src/test/java/feast/ingestion/transform/fn/FilterFeatureRowDoFnTest.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ -package feast.ingestion.transform.fn; - -import feast.types.FeatureProto.Feature; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.ValueProto.Value; -import java.util.Arrays; -import java.util.List; -import org.apache.beam.sdk.testing.PAssert; -import org.apache.beam.sdk.testing.TestPipeline; -import org.apache.beam.sdk.transforms.Create; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.values.PCollection; -import org.junit.Rule; -import org.junit.Test; - -public class FilterFeatureRowDoFnTest { - @Rule public TestPipeline testPipeline = TestPipeline.create(); - - @Test - public void shouldIgnoreUnspecifiedFeatureID() { - String featureId1 = "testentity.feature1"; - String featureId2 = "testentity.feature2"; - String featureId3 = "testentity.feature3"; - - List specifiedFeatureIds = Arrays.asList(featureId1, featureId2, featureId3); - FilterFeatureRowDoFn doFn = new FilterFeatureRowDoFn(specifiedFeatureIds); - - FeatureRow row = - FeatureRow.newBuilder() - .setEntityKey("1234") - .setEntityName("testentity") - .addFeatures( - Feature.newBuilder().setId(featureId1).setValue(Value.newBuilder().setInt64Val(10))) - .addFeatures( - Feature.newBuilder().setId(featureId2).setValue(Value.newBuilder().setInt64Val(11))) - .addFeatures( - Feature.newBuilder().setId(featureId3).setValue(Value.newBuilder().setInt64Val(12))) - // this feature should be ignored - .addFeatures(Feature.newBuilder().setId("testEntity.unknown_feature")) - .build(); - - PCollection output = testPipeline.apply(Create.of(row)) - .apply(ParDo.of(doFn)); - - FeatureRow expRow = - FeatureRow.newBuilder() - .setEntityKey("1234") - .setEntityName("testentity") - .addFeatures( - Feature.newBuilder().setId(featureId1).setValue(Value.newBuilder().setInt64Val(10))) - .addFeatures( - Feature.newBuilder().setId(featureId2).setValue(Value.newBuilder().setInt64Val(11))) - .addFeatures( - Feature.newBuilder().setId(featureId3).setValue(Value.newBuilder().setInt64Val(12))) - .build(); - PAssert.that(output).containsInAnyOrder(expRow); - - testPipeline.run(); - } -} diff --git a/ingestion/src/test/java/feast/ingestion/util/DateUtilTest.java b/ingestion/src/test/java/feast/ingestion/util/DateUtilTest.java index 9a2d57a23c3..ed0402a07cf 100644 --- a/ingestion/src/test/java/feast/ingestion/util/DateUtilTest.java +++ b/ingestion/src/test/java/feast/ingestion/util/DateUtilTest.java @@ -23,6 +23,7 @@ import static org.hamcrest.Matchers.not; import com.google.protobuf.Timestamp; +import feast.ingestion.utils.DateUtil; import junit.framework.TestCase; import org.joda.time.DateTime; diff --git a/ingestion/src/test/java/feast/ingestion/util/JsonUtilTest.java b/ingestion/src/test/java/feast/ingestion/util/JsonUtilTest.java index 
ee91f120463..6ffe4ebf826 100644 --- a/ingestion/src/test/java/feast/ingestion/util/JsonUtilTest.java +++ b/ingestion/src/test/java/feast/ingestion/util/JsonUtilTest.java @@ -20,6 +20,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThat; +import feast.ingestion.utils.JsonUtil; import java.util.Collections; import java.util.HashMap; import java.util.Map; diff --git a/ingestion/src/test/java/feast/ingestion/util/ProtoUtilTest.java b/ingestion/src/test/java/feast/ingestion/util/ProtoUtilTest.java deleted file mode 100644 index f99a432b822..00000000000 --- a/ingestion/src/test/java/feast/ingestion/util/ProtoUtilTest.java +++ /dev/null @@ -1,66 +0,0 @@ -package feast.ingestion.util; - -import static org.junit.Assert.assertEquals; - -import feast.DriverAreaProto.DriverArea; -import java.io.BufferedWriter; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import lombok.extern.slf4j.Slf4j; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; - -@Slf4j -public class ProtoUtilTest { - - @Rule - public TemporaryFolder tempFolder = new TemporaryFolder(); - - @Test - public void testDecodeProtoYaml() throws IOException { - - String yaml = "" - + "driverId: 1\n" - + "areaId: 2"; - - DriverArea da = ProtoUtil.decodeProtoYaml(yaml, DriverArea.getDefaultInstance()); - assertEquals(1, da.getDriverId()); - assertEquals(2, da.getAreaId()); - } - - @Test - public void testDecodeProtoYamlFile() throws IOException { - String yaml = "" - + "driverId: 1\n" - + "areaId: 2"; - Path path = tempFolder.newFile("file.yaml").toPath(); - try (BufferedWriter w = Files.newBufferedWriter(path)) { - w.write(yaml); - } - - DriverArea da = ProtoUtil.decodeProtoYamlFile(path, DriverArea.getDefaultInstance()); - assertEquals(1, da.getDriverId()); - assertEquals(2, da.getAreaId()); - } - - @Test - @Ignore - // Uncomment this during manual testing, as we don't want running unit tests to require GCS access - public void testDecodeProtoYamlFileGCS() throws IOException { - String yaml = "" - + "driverId: 1\n" - + "areaId: 2"; - Path path = PathUtil.getPath("gs://feast-temp/file.yaml"); - log.info(path.toUri().toString()); - try (BufferedWriter w = Files.newBufferedWriter(path)) { - w.write(yaml); - } - - DriverArea da = ProtoUtil.decodeProtoYamlFile(path, DriverArea.getDefaultInstance()); - assertEquals(1, da.getDriverId()); - assertEquals(2, da.getAreaId()); - } -} \ No newline at end of file diff --git a/ingestion/src/test/java/feast/ingestion/util/StoreUtilTest.java b/ingestion/src/test/java/feast/ingestion/util/StoreUtilTest.java new file mode 100644 index 00000000000..efc8e9ac0c6 --- /dev/null +++ b/ingestion/src/test/java/feast/ingestion/util/StoreUtilTest.java @@ -0,0 +1,28 @@ +package feast.ingestion.util; + +import static feast.types.ValueProto.ValueType.Enum.INT32; +import static feast.types.ValueProto.ValueType.Enum.STRING_LIST; + +import com.google.cloud.bigquery.BigQuery; +import feast.core.FeatureSetProto.EntitySpec; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.FeatureSetProto.FeatureSpec; +import feast.ingestion.utils.StoreUtil; +import org.junit.Test; +import org.mockito.Mockito; + +public class StoreUtilTest { + @Test + public void setupBigQuery_shouldCreateTable_givenFeatureSetSpec() { + FeatureSetSpec featureSetSpec = + FeatureSetSpec.newBuilder() + .setName("feature_set_1") + .setVersion(1) + 
.addEntities(EntitySpec.newBuilder().setName("entity_1").setValueType(INT32)) + .addFeatures(FeatureSpec.newBuilder().setName("feature_1").setValueType(INT32)) + .addFeatures(FeatureSpec.newBuilder().setName("feature_2").setValueType(STRING_LIST)) + .build(); + BigQuery mockedBigquery = Mockito.mock(BigQuery.class); + StoreUtil.setupBigQuery(featureSetSpec, "project-1", "dataset_1", mockedBigquery); + } +} \ No newline at end of file diff --git a/ingestion/src/test/java/feast/options/OptionsParserTest.java b/ingestion/src/test/java/feast/options/OptionsParserTest.java deleted file mode 100644 index 76ef68c331e..00000000000 --- a/ingestion/src/test/java/feast/options/OptionsParserTest.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.options; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -import com.fasterxml.jackson.annotation.JsonProperty; -import javax.validation.constraints.NotNull; -import javax.validation.constraints.Positive; -import org.junit.Test; -import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; - -public class OptionsParserTest { - - @Test - public void getJsonSchema() { - String jsonSchema = - "{\"type\":\"object\",\"properties\":{\"foo\":{\"type\":\"string\"},\"bar\":{\"type\":\"integer\"}}}"; - assertEquals(jsonSchema, OptionsParser.getJsonSchema(TestType1.class)); - } - - @Test - public void parseOptions() { - TestType1 options = - OptionsParser.parse( - ImmutableMap.builder().put("foo", "x").put("bar", "1").build(), - TestType1.class); - assertEquals("x", options.foo); - assertEquals(1, options.bar); - } - - @Test - public void lenientParseOptions() { - TestType1 options = - OptionsParser.lenientParse( - ImmutableMap.builder().put("foo", "x").put("bar", "1") - .put("ignoreme", "123").build(), - TestType1.class); - assertEquals("x", options.foo); - assertEquals(1, options.bar); - } - - @Test - public void parseOptionsMissingOption() { - TestType1 options = - OptionsParser.parse( - ImmutableMap.builder().put("bar", "1").build(), TestType1.class); - assertNull(options.foo); - assertEquals(1, options.bar); - } - - @Test(expected = IllegalArgumentException.class) - public void parseOptionsUnknownField() { - OptionsParser.parse( - ImmutableMap.builder().put("foo", "x").put("biz", "1").build(), - TestType1.class); - } - - @Test - public void parseOptionsCustomProps() { - TestType2 options = - OptionsParser.parse( - ImmutableMap.builder() - .put("test.foo", "x") - .put("test.bar", "1") - .build(), - TestType2.class); - assertEquals("x", options.foo); - assertEquals(1, options.bar); - } - - @Test - public void parseOptionsWithValidation() { - TestType3 options = - OptionsParser.parse( - ImmutableMap.builder().put("foo", "x").put("bar", "1").build(), - TestType3.class); - assertEquals("x", options.foo); - assertEquals(1, options.bar); - } - - @Test(expected = IllegalArgumentException.class) - public void 
parseOptionsWithValidationInvalid() { - OptionsParser.parse( - ImmutableMap.builder().put("foo", "x").put("bar", "-1").build(), - TestType3.class); - } - - public static class TestType1 implements Options { - - public String foo; - public int bar; - } - - public static class TestType2 implements Options { - - @JsonProperty(value = "test.foo") - public String foo; - - @JsonProperty(value = "test.bar") - public int bar; - } - - public static class TestType3 implements Options { - - @NotNull - public String foo; - @Positive - public int bar; - } -} diff --git a/ingestion/src/test/java/feast/source/bigquery/BQToFeatureRowFnTest.java b/ingestion/src/test/java/feast/source/bigquery/BQToFeatureRowFnTest.java deleted file mode 100644 index e33bcc0249e..00000000000 --- a/ingestion/src/test/java/feast/source/bigquery/BQToFeatureRowFnTest.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.source.bigquery; - -import static org.hamcrest.Matchers.equalTo; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.google.api.services.bigquery.model.TableFieldSchema; -import com.google.api.services.bigquery.model.TableSchema; -import com.google.cloud.bigquery.LegacySQLTypeName; -import com.google.common.collect.Lists; -import com.google.protobuf.Timestamp; -import org.apache.avro.generic.GenericRecord; -import org.apache.beam.sdk.io.gcp.bigquery.SchemaAndRecord; -import org.joda.time.DateTime; -import org.junit.Assert; -import org.junit.Test; -import feast.ingestion.model.Features; -import feast.ingestion.model.Values; -import feast.ingestion.util.DateUtil; -import feast.specs.ImportSpecProto.Field; -import feast.specs.ImportSpecProto.ImportSpec; -import feast.specs.ImportSpecProto.Schema; -import feast.types.FeatureRowProto.FeatureRow; - -public class BQToFeatureRowFnTest { - @Test - public void testImportSpecFieldsMissingFromBQTable() { - // TODO what if a field in the import spec is not in the bq schema - } - - @Test - public void testStringEntityKey() { - Timestamp now = DateUtil.toTimestamp(DateTime.now()); - ImportSpec importSpec = - ImportSpec.newBuilder() - .setType("bigquery") - .addEntities("testEntity") - .setSchema( - Schema.newBuilder() - .setTimestampValue(now) - .setEntityIdColumn("bq_id") - .addFields(Field.newBuilder().setName("bq_timestamp")) - .addFields(Field.newBuilder().setName("bq_id")) - .addFields( - Field.newBuilder() - .setName("bq_value") - .setFeatureId("testEntity.testInt64"))) - .build(); - - GenericRecord record = mock(GenericRecord.class); - when(record.get("bq_id")).thenReturn("abcd"); - when(record.get("bq_timestamp")) - .thenReturn(now.getSeconds() * 1000000); // BQ uses Long in microseconds - when(record.get("bq_value")).thenReturn(Long.MAX_VALUE); - TableSchema tableSchema = new TableSchema(); - // Type names are strings that must match LegacySQLTypeName.class - tableSchema.setFields( - Lists.newArrayList( - new 
TableFieldSchema().setName("bq_id").setType(LegacySQLTypeName.STRING.name()), - new TableFieldSchema().setName("bq_timestamp").setType(LegacySQLTypeName.TIMESTAMP.name()), - new TableFieldSchema().setName("bq_value").setType(LegacySQLTypeName.INTEGER.name()))); - SchemaAndRecord schemaAndRecord = new SchemaAndRecord(record, tableSchema); - FeatureRow row = new BigQueryToFeatureRowFn(importSpec).apply(schemaAndRecord); - Assert.assertEquals(now, row.getEventTimestamp()); - Assert.assertEquals("abcd", row.getEntityKey()); - Assert.assertEquals("testEntity", row.getEntityName()); - Assert.assertThat( - row.getFeaturesList(), - equalTo( - Lists.newArrayList( - Features.of("testEntity.testInt64", Values.ofInt64(Long.MAX_VALUE))))); - } - - @Test - public void testInt64EntityKey() { - Timestamp now = DateUtil.toTimestamp(DateTime.now()); - ImportSpec importSpec = - ImportSpec.newBuilder() - .setType("bigquery") - .addEntities("testEntity") - .setSchema( - Schema.newBuilder() - .setTimestampValue(now) - .setEntityIdColumn("bq_id") - .addFields(Field.newBuilder().setName("bq_timestamp")) - .addFields(Field.newBuilder().setName("bq_id")) - .addFields( - Field.newBuilder() - .setName("bq_value") - .setFeatureId("testEntity.testInt64"))) - .build(); - - GenericRecord record = mock(GenericRecord.class); - when(record.get("bq_id")).thenReturn(1234L); - when(record.get("bq_timestamp")) - .thenReturn(now.getSeconds() * 1000000); // BQ uses Long in microseconds - when(record.get("bq_value")).thenReturn(Long.MAX_VALUE); - TableSchema tableSchema = new TableSchema(); - // Type names are strings that must match LegacySQLTypeName.class - tableSchema.setFields( - Lists.newArrayList( - new TableFieldSchema().setName("bq_id").setType(LegacySQLTypeName.INTEGER.name()), - new TableFieldSchema().setName("bq_timestamp").setType(LegacySQLTypeName.TIMESTAMP.name()), - new TableFieldSchema().setName("bq_value").setType(LegacySQLTypeName.INTEGER.name()))); - SchemaAndRecord schemaAndRecord = new SchemaAndRecord(record, tableSchema); - FeatureRow row = new BigQueryToFeatureRowFn(importSpec).apply(schemaAndRecord); - Assert.assertEquals(now, row.getEventTimestamp()); - Assert.assertEquals("1234", row.getEntityKey()); - Assert.assertEquals("testEntity", row.getEntityName()); - Assert.assertThat( - row.getFeaturesList(), - equalTo( - Lists.newArrayList( - Features.of("testEntity.testInt64", Values.ofInt64(Long.MAX_VALUE))))); - } -} diff --git a/ingestion/src/test/java/feast/source/bigquery/BigQuerySourceOptionsTest.java b/ingestion/src/test/java/feast/source/bigquery/BigQuerySourceOptionsTest.java deleted file mode 100644 index 27b9842e0d6..00000000000 --- a/ingestion/src/test/java/feast/source/bigquery/BigQuerySourceOptionsTest.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.source.bigquery; - -import feast.options.OptionsParser; -import feast.source.bigquery.BigQueryFeatureSource.BigQuerySourceOptions; -import org.junit.Assert; -import org.junit.Test; -import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; - -public class BigQuerySourceOptionsTest { - - @Test - public void testParse() { - BigQuerySourceOptions options = - OptionsParser.parse( - ImmutableMap.builder() - .put("project", "project1") - .put("dataset", "dataset1") - .put("table", "table1") - .build(), - BigQuerySourceOptions.class); - - Assert.assertEquals("project1", options.project); - Assert.assertEquals("dataset1", options.dataset); - Assert.assertEquals("table1", options.table); - } - - @Test(expected = IllegalArgumentException.class) - public void testParseNoProject() { - OptionsParser.parse( - ImmutableMap.builder() - .put("dataset", "dataset1") - .put("table", "table1") - .build(), - BigQuerySourceOptions.class); - } - - @Test(expected = IllegalArgumentException.class) - public void testParseNoDataset() { - OptionsParser.parse( - ImmutableMap.builder() - .put("project", "project1") - .put("table", "table1") - .build(), - BigQuerySourceOptions.class); - } - - @Test(expected = IllegalArgumentException.class) - public void testParseNoTable() { - OptionsParser.parse( - ImmutableMap.builder() - .put("project", "project1") - .put("dataset", "dataset1") - .build(), - BigQuerySourceOptions.class); - } -} diff --git a/ingestion/src/test/java/feast/source/common/ValueMapToFeatureRowTransformTest.java b/ingestion/src/test/java/feast/source/common/ValueMapToFeatureRowTransformTest.java deleted file mode 100644 index 246dc6317de..00000000000 --- a/ingestion/src/test/java/feast/source/common/ValueMapToFeatureRowTransformTest.java +++ /dev/null @@ -1,234 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.source.common; - -import static feast.source.csv.StringToValueMapTransform.VALUE_MAP_CODER; -import static org.junit.Assert.assertEquals; - -import com.google.common.collect.Lists; -import feast.ingestion.model.Features; -import feast.ingestion.model.Values; -import feast.ingestion.util.DateUtil; -import feast.specs.ImportSpecProto.Field; -import feast.specs.ImportSpecProto.Schema; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.ValueProto.Value; -import java.util.HashMap; -import java.util.Map; -import org.apache.beam.sdk.testing.PAssert; -import org.apache.beam.sdk.testing.TestPipeline; -import org.apache.beam.sdk.transforms.Create; -import org.apache.beam.sdk.values.PCollection; -import org.joda.time.DateTime; -import org.junit.Rule; -import org.junit.Test; - -public class ValueMapToFeatureRowTransformTest { - - @Rule - public TestPipeline pipeline = TestPipeline.create(); - - - @Test - public void testTimestampValue() { - Schema schema = Schema.newBuilder() - .setTimestampValue(DateUtil.toTimestamp(DateTime.now())) - .build(); - Map map = new HashMap<>(); - - PCollection output = pipeline - .apply(Create.of(Lists.newArrayList(map)).withCoder(VALUE_MAP_CODER)) - .apply(new ValueMapToFeatureRowTransform("entity", schema)); - - PAssert.that(output).satisfies(maps -> { - FeatureRow row = maps.iterator().next(); - assertEquals("entity", row.getEntityName()); - assertEquals(schema.getTimestampValue(), row.getEventTimestamp()); - return null; - }); - pipeline.run(); - } - - @Test - public void testTimestampColumn() { - Schema schema = Schema.newBuilder() - .setTimestampColumn("timestamp") - .addFields(Field.newBuilder().setName("c1").build()).build(); - - Map map = new HashMap<>(); - map.put("timestamp", Values.asTimestamp(Values.ofString("2019-01-30T19:30:12Z"))); - - PCollection output = pipeline - .apply(Create.of(Lists.newArrayList(map)).withCoder(VALUE_MAP_CODER)) - .apply(new ValueMapToFeatureRowTransform("entity", schema)); - - PAssert.that(output).satisfies(maps -> { - FeatureRow row = maps.iterator().next(); - assertEquals("entity", row.getEntityName()); - assertEquals(map.get("timestamp").getTimestampVal(), row.getEventTimestamp()); - return null; - }); - pipeline.run(); - } - - @Test - public void testTimestampColumnOptional() { - // schema field for timestamp column is optional, as long as it can be provided by the source - Schema schema = Schema.newBuilder() - .setTimestampColumn("timestamp").build(); - - Map map = new HashMap<>(); - map.put("timestamp", Values.asTimestamp(Values.ofString("2019-01-30T19:30:12Z"))); - - PCollection output = pipeline - .apply(Create.of(Lists.newArrayList(map)).withCoder(VALUE_MAP_CODER)) - .apply(new ValueMapToFeatureRowTransform("entity", schema)); - - PAssert.that(output).satisfies(maps -> { - FeatureRow row = maps.iterator().next(); - assertEquals("entity", row.getEntityName()); - assertEquals(map.get("timestamp").getTimestampVal(), row.getEventTimestamp()); - return null; - }); - pipeline.run(); - } - - @Test - public void testEntityKeyColumn() { - Schema schema = Schema.newBuilder() - .setEntityIdColumn("driver_id").build(); - - Map map = new HashMap<>(); - map.put("driver_id", Values.ofString("1234")); - - PCollection output = pipeline - .apply(Create.of(Lists.newArrayList(map)).withCoder(VALUE_MAP_CODER)) - .apply(new ValueMapToFeatureRowTransform("entity", schema)); - - PAssert.that(output).satisfies(maps -> { - FeatureRow row = maps.iterator().next(); - assertEquals("entity", 
row.getEntityName()); - assertEquals("1234", row.getEntityKey()); - return null; - }); - pipeline.run(); - } - - @Test - public void testEntityKeyColumnTypeConverted() { - Schema schema = Schema.newBuilder() - .setEntityIdColumn("driver_id").build(); - - Map map = new HashMap<>(); - map.put("driver_id", Values.ofInt64(12345)); //integer Value gets converted to string. - - PCollection output = pipeline - .apply(Create.of(Lists.newArrayList(map)).withCoder(VALUE_MAP_CODER)) - .apply(new ValueMapToFeatureRowTransform("entity", schema)); - - PAssert.that(output).satisfies(maps -> { - FeatureRow row = maps.iterator().next(); - assertEquals("entity", row.getEntityName()); - assertEquals("12345", row.getEntityKey()); - return null; - }); - pipeline.run(); - } - - @Test - public void testFieldPickedWithFeatureId() { - Schema schema = Schema.newBuilder() - .addFields(Field.newBuilder().setName("x").setFeatureId("f1")).build(); - - Map map = new HashMap<>(); - map.put("x", Values.ofInt64(1)); - - PCollection output = pipeline - .apply(Create.of(Lists.newArrayList(map)).withCoder(VALUE_MAP_CODER)) - .apply(new ValueMapToFeatureRowTransform("entity", schema)); - - PAssert.that(output).satisfies(maps -> { - FeatureRow row = maps.iterator().next(); - assertEquals(1, row.getFeaturesList().size()); - assertEquals(Features.of("f1", Values.ofInt64(1)), row.getFeatures(0)); - - return null; - }); - pipeline.run(); - } - - @Test - public void testUnsetFeatureValue() { - Schema schema = Schema.newBuilder() - .addFields(Field.newBuilder().setName("x").setFeatureId("f1")).build(); - - Map map = new HashMap<>(); - map.put("x", Value.getDefaultInstance()); - - PCollection output = pipeline - .apply(Create.of(Lists.newArrayList(map)).withCoder(VALUE_MAP_CODER)) - .apply(new ValueMapToFeatureRowTransform("entity", schema)); - - PAssert.that(output).satisfies(maps -> { - FeatureRow row = maps.iterator().next(); - assertEquals(0, row.getFeaturesList().size()); - return null; - }); - pipeline.run(); - } - - @Test - public void testFieldNonFeatureIdIgnored() { - Schema schema = Schema.newBuilder() - .addFields(Field.newBuilder().setName("x")).build(); - - Map map = new HashMap<>(); - map.put("x", Values.ofInt64(1)); - - PCollection output = pipeline - .apply(Create.of(Lists.newArrayList(map)).withCoder(VALUE_MAP_CODER)) - .apply(new ValueMapToFeatureRowTransform("entity", schema)); - - PAssert.that(output).satisfies(maps -> { - FeatureRow row = maps.iterator().next(); - assertEquals(0, row.getFeaturesList().size()); - return null; - }); - pipeline.run(); - } - - @Test - public void testFeatureWithValueNotSetIsIgnored() { - Schema schema = Schema.newBuilder() - .addFields(Field.newBuilder().setName("x").setFeatureId("f1")).build(); - - Map map = new HashMap<>(); - map.put("x", Value.newBuilder().build()); - - PCollection output = pipeline - .apply(Create.of(Lists.newArrayList(map)).withCoder(VALUE_MAP_CODER)) - .apply(new ValueMapToFeatureRowTransform("entity", schema)); - - PAssert.that(output).satisfies(maps -> { - FeatureRow row = maps.iterator().next(); - assertEquals(0, row.getFeaturesList().size()); - return null; - }); - pipeline.run(); - } -} \ No newline at end of file diff --git a/ingestion/src/test/java/feast/source/csv/ParseCSVTransformTest.java b/ingestion/src/test/java/feast/source/csv/ParseCSVTransformTest.java deleted file mode 100644 index 5fada5073b5..00000000000 --- a/ingestion/src/test/java/feast/source/csv/ParseCSVTransformTest.java +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Copyright 2019 The Feast 
Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.source.csv; - -import static feast.FeastMatchers.hasCount; -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; - -import com.google.common.collect.Lists; -import feast.source.csv.ParseCsvTransform.CSVLineParser; -import feast.source.csv.ParseCsvTransform.StringMap; -import java.util.List; -import java.util.Map; -import lombok.extern.slf4j.Slf4j; -import org.apache.beam.sdk.Pipeline.PipelineExecutionException; -import org.apache.beam.sdk.testing.PAssert; -import org.apache.beam.sdk.testing.TestPipeline; -import org.apache.beam.sdk.transforms.Create; -import org.apache.beam.sdk.values.PCollection; -import org.junit.Rule; -import org.junit.Test; - -@Slf4j -public class ParseCSVTransformTest { - - @Rule - public TestPipeline pipeline = TestPipeline.create(); - - @Test - public void testEmptyLine() { - PCollection> output = pipeline.apply(Create.of("")) - .apply(ParseCsvTransform.builder().header(Lists.newArrayList()).build()); - PAssert.that(output).satisfies(hasCount(0)); - pipeline.run(); - } - - @Test - public void testAllEmptyFields() { - PCollection> output = pipeline.apply(Create.of(",,")) - .apply(ParseCsvTransform.builder().header(Lists.newArrayList("a", "b", "c")).build()); - PAssert.that(output).satisfies(hasCount(1)); - PAssert.that(output).satisfies(rows -> { - Map row = rows.iterator().next(); - assertEquals(row.size(), 3); - assertEquals(row.get("a"), ""); - assertEquals(row.get("b"), ""); - assertEquals(row.get("c"), ""); - return null; - }); - pipeline.run(); - } - - @Test(expected = PipelineExecutionException.class) - public void testDuplicateHeaders() { - pipeline.apply(Create.of("1,2,3")) - .apply(ParseCsvTransform.builder().header(Lists.newArrayList("a", "a", "a")).build()); - pipeline.run(); - } - - @Test - public void testSomeEmptyFields() { - PCollection> output = pipeline.apply(Create.of("1,,3")) - .apply(ParseCsvTransform.builder().header(Lists.newArrayList("a", "b", "c")).build()); - PAssert.that(output).satisfies(hasCount(1)); - PAssert.that(output).satisfies(rows -> { - Map row = rows.iterator().next(); - assertEquals(row.size(), 3); - assertEquals(row.get("a"), "1"); - assertEquals(row.get("b"), ""); - assertEquals(row.get("c"), "3"); - return null; - }); - pipeline.run(); - } - - @Test - public void testTooManyHeaders() { - PCollection> output = pipeline.apply(Create.of("1,2")) - .apply(ParseCsvTransform.builder().header(Lists.newArrayList("a", "b", "c")).build()); - PAssert.that(output).satisfies(hasCount(1)); - PAssert.that(output).satisfies(rows -> { - Map row = rows.iterator().next(); - assertEquals(row.size(), 2); - assertEquals(row.get("a"), "1"); - assertEquals(row.get("b"), "2"); - return null; - }); - pipeline.run(); - } - - @Test - public void testNotEnoughHeaders() { - PCollection> output = pipeline.apply(Create.of("1,2,3")) - 
.apply(ParseCsvTransform.builder().header(Lists.newArrayList("a", "b")).build()); - PAssert.that(output).satisfies(hasCount(1)); - PAssert.that(output).satisfies(rows -> { - Map row = rows.iterator().next(); - assertEquals(row.size(), 2); - assertEquals(row.get("a"), "1"); - assertEquals(row.get("b"), "2"); - return null; - }); - pipeline.run(); - } - - @Test - public void testCSVLineParser_multiLines() { - CSVLineParser parser = new CSVLineParser(Lists.newArrayList("c1", "c2")); - List actual = parser.records("a,b\nc,d"); - - List expected = Lists.newArrayList( - new StringMap().thisput("c1", "a").thisput("c2", "b"), - new StringMap().thisput("c1", "c").thisput("c2", "d") - ); - assertThat(actual, is(expected)); - } - - @Test(expected = IllegalArgumentException.class) - public void testCSVLineParser_duplicateHeaders() { - CSVLineParser parser = new CSVLineParser(Lists.newArrayList("c1", "c1")); - List actual = parser.records("a,b"); - } - - @Test - public void testCSVLineParser_repeatedLines() { - CSVLineParser parser = new CSVLineParser(Lists.newArrayList("c1", "c2")); - List actual = parser.records("a,b"); - List expected = Lists.newArrayList( - new StringMap().thisput("c1", "a").thisput("c2", "b") - ); - assertThat(actual, is(expected)); - - actual = parser.records("c,d"); - expected = Lists.newArrayList( - new StringMap().thisput("c1", "c").thisput("c2", "d") - ); - assertThat(actual, is(expected)); - } -} diff --git a/ingestion/src/test/java/feast/source/csv/StringToValueMapTransformTest.java b/ingestion/src/test/java/feast/source/csv/StringToValueMapTransformTest.java deleted file mode 100644 index 3d523a6609f..00000000000 --- a/ingestion/src/test/java/feast/source/csv/StringToValueMapTransformTest.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.source.csv; - -import static feast.source.csv.StringToValueMapTransform.VALUE_MAP_CODER; -import static org.junit.Assert.assertEquals; - -import com.google.common.collect.Lists; -import feast.ingestion.model.Values; -import feast.source.csv.StringToValueMapTransform; -import feast.types.ValueProto.Value; -import java.util.HashMap; -import java.util.Map; -import org.apache.beam.sdk.coders.MapCoder; -import org.apache.beam.sdk.coders.StringUtf8Coder; -import org.apache.beam.sdk.testing.PAssert; -import org.apache.beam.sdk.testing.TestPipeline; -import org.apache.beam.sdk.transforms.Create; -import org.apache.beam.sdk.values.PCollection; -import org.junit.Rule; -import org.junit.Test; - -public class StringToValueMapTransformTest { - - @Rule - public TestPipeline pipeline = TestPipeline.create(); - - @Test - public void testEmptyValues() { - Map map = new HashMap<>(); - map.put("a", ""); - map.put("b", ""); - PCollection> input = pipeline.apply(Create.of(Lists.newArrayList(map)) - .withCoder(MapCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of()))); - - PCollection> output = input.apply(new StringToValueMapTransform()) - .setCoder(VALUE_MAP_CODER); - - PAssert.that(output).containsInAnyOrder(new HashMap<>()); - pipeline.run(); - } - - @Test - public void testStringValues() { - Map map = new HashMap<>(); - map.put("a", "abcd"); - map.put("b", "1234"); - PCollection> input = pipeline.apply(Create.of(Lists.newArrayList(map)) - .withCoder(MapCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of()))); - - PCollection> output = input.apply(new StringToValueMapTransform()) - .setCoder(VALUE_MAP_CODER); - - PAssert.that(output).satisfies(maps -> { - Map outMap = maps.iterator().next(); - assertEquals(outMap.get("a"), Values.ofString("abcd")); - assertEquals(outMap.get("b"), Values.ofString("1234")); - return null; - }); - pipeline.run(); - } -} \ No newline at end of file diff --git a/ingestion/src/test/java/feast/source/json/ParseJsonTransformTest.java b/ingestion/src/test/java/feast/source/json/ParseJsonTransformTest.java deleted file mode 100644 index 5b19954ca91..00000000000 --- a/ingestion/src/test/java/feast/source/json/ParseJsonTransformTest.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.source.json; - -import static org.junit.Assert.assertEquals; - -import feast.ingestion.model.Values; -import feast.source.json.ParseJsonTransform; -import feast.types.ValueProto.Value; -import java.util.Map; -import lombok.extern.slf4j.Slf4j; -import org.apache.beam.sdk.testing.PAssert; -import org.apache.beam.sdk.testing.TestPipeline; -import org.apache.beam.sdk.transforms.Create; -import org.apache.beam.sdk.values.PCollection; -import org.junit.Rule; -import org.junit.Test; - -@Slf4j -public class ParseJsonTransformTest { - - @Rule - public TestPipeline pipeline = TestPipeline.create(); - - @Test - public void testJsonBool() { - PCollection> output = pipeline.apply(Create.of("{\"x\": true}")) - .apply(new ParseJsonTransform()); - PAssert.that(output).satisfies((maps) -> { - Map map = maps.iterator().next(); - assertEquals(true, map.get("x").getBoolVal()); - return null; - }); - pipeline.run(); - } - - @Test - public void testJsonString() { - PCollection> output = pipeline.apply(Create.of("{\"x\": \"abc\"}")) - .apply(new ParseJsonTransform()); - PAssert.that(output).satisfies((maps) -> { - Map map = maps.iterator().next(); - assertEquals("abc", map.get("x").getStringVal()); - return null; - }); - pipeline.run(); - } - - - @Test - public void testJsonObject() { - PCollection> output = pipeline.apply(Create.of("{\"x\": {}}")) - .apply(new ParseJsonTransform()); - PAssert.that(output).satisfies((maps) -> { - Map map = maps.iterator().next(); - assertEquals("{}", map.get("x").getStringVal()); - return null; - }); - pipeline.run(); - } - - - @Test - public void testJsonLong() { - PCollection> output = pipeline - .apply(Create - .of(String.format("{\"max\": %s, \"min\": %s}", Long.MAX_VALUE, Long.MIN_VALUE))) - .apply(new ParseJsonTransform()); - PAssert.that(output).satisfies((maps) -> { - Map map = maps.iterator().next(); - assertEquals(Values.ofInt64(Long.MAX_VALUE), map.get("max")); - assertEquals(Values.ofInt64(Long.MIN_VALUE), map.get("min")); - return null; - }); - pipeline.run(); - } - - @Test - public void testJsonInteger() { - PCollection> output = pipeline - .apply(Create - .of(String.format("{\"max\": %s, \"min\": %s}", Integer.MAX_VALUE, Integer.MIN_VALUE))) - .apply(new ParseJsonTransform()); - PAssert.that(output).satisfies((maps) -> { - Map map = maps.iterator().next(); - assertEquals(Values.ofInt64(Integer.MAX_VALUE), map.get("max")); - assertEquals(Values.ofInt64(Integer.MIN_VALUE), map.get("min")); - return null; - }); - pipeline.run(); - } - - - @Test - public void testJsonDouble() { - PCollection> output = pipeline - .apply(Create - .of(String.format("{\"max\": %s, \"min\": %s}", Double.MAX_VALUE, Double.MIN_VALUE))) - .apply(new ParseJsonTransform()); - PAssert.that(output).satisfies((maps) -> { - Map map = maps.iterator().next(); - assertEquals(Values.ofDouble(Double.MAX_VALUE), map.get("max")); - assertEquals(Values.ofDouble(Double.MIN_VALUE), map.get("min")); - return null; - }); - pipeline.run(); - } - - @Test - public void testJsonFloat() { - PCollection> output = pipeline - .apply(Create - .of(String - .format("{\"max\": %s, \"min\": %s, \"x\": %s}", Float.MAX_VALUE, Float.MIN_VALUE, - 0.12345678f))) - .apply(new ParseJsonTransform()); - PAssert.that(output).satisfies((maps) -> { - Map map = maps.iterator().next(); - assertEquals(Values.ofDouble(Float.MAX_VALUE).getDoubleVal(), - map.get("max").getDoubleVal(), Float.MAX_VALUE / 2E7); - assertEquals(Values.ofDouble(0.12345678F).getDoubleVal(), map.get("x").getDoubleVal(), - 0.00000001); - 
assertEquals(Values.ofDouble(Float.MIN_VALUE).getDoubleVal(), - map.get("min").getDoubleVal(), Float.MIN_VALUE * 2E8); - - return null; - }); - pipeline.run(); - } - - @Test - public void testJsonArray() { - PCollection> output = pipeline.apply(Create.of("{\"x\": []}")) - .apply(new ParseJsonTransform()); - PAssert.that(output).satisfies((maps) -> { - Map map = maps.iterator().next(); - assertEquals("[]", map.get("x").getStringVal()); - return null; - }); - pipeline.run(); - } -} diff --git a/ingestion/src/test/java/feast/source/kafka/KafkaFeatureSourceTest.java b/ingestion/src/test/java/feast/source/kafka/KafkaFeatureSourceTest.java deleted file mode 100644 index 50ffa1e87fa..00000000000 --- a/ingestion/src/test/java/feast/source/kafka/KafkaFeatureSourceTest.java +++ /dev/null @@ -1,125 +0,0 @@ -package feast.source.kafka; - -import feast.specs.ImportSpecProto.ImportSpec; -import feast.types.FeatureRowProto.FeatureRow; -import java.util.Map; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; -import lombok.extern.slf4j.Slf4j; -import org.apache.beam.sdk.testing.PAssert; -import org.apache.beam.sdk.testing.TestPipeline; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PCollection.IsBounded; -import org.apache.kafka.clients.producer.Producer; -import org.apache.kafka.common.serialization.ByteArraySerializer; -import org.apache.kafka.common.serialization.Serializer; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.kafka.core.DefaultKafkaProducerFactory; -import org.springframework.kafka.core.KafkaTemplate; -import org.springframework.kafka.core.ProducerFactory; -import org.springframework.kafka.test.rule.EmbeddedKafkaRule; -import org.springframework.kafka.test.utils.KafkaTestUtils; -import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.junit4.SpringRunner; - -@Slf4j -@RunWith(SpringRunner.class) -@SpringBootTest -@DirtiesContext -public class KafkaFeatureSourceTest { - - @ClassRule - public static EmbeddedKafkaRule embeddedKafka = new EmbeddedKafkaRule(1, true, "TEST_TOPIC"); - @Rule - public TestPipeline pipeline = TestPipeline.create(); - @Autowired - private KafkaTemplate template; - - - public void send(FeatureRow... 
rows) { - for (FeatureRow row : rows) { - try { - log.info("Sent: " + template.send("TEST_TOPIC", row).get().toString()); - } catch (InterruptedException e) { - e.printStackTrace(); - } catch (ExecutionException e) { - e.printStackTrace(); - } - } - } - - @Test - public void testFoo() throws ExecutionException, InterruptedException { - String server = embeddedKafka.getEmbeddedKafka().getBrokerAddresses()[0].toString(); - ImportSpec importSpec = ImportSpec.newBuilder().setType("kafka") - .addEntities("testEntity") - .putSourceOptions("topics", "TEST_TOPIC") - .putSourceOptions("server", server) - .putJobOptions("sample.limit", "1") - .build(); - FeatureRow row = FeatureRow.newBuilder().setEntityKey("key").build(); - ScheduledExecutorService scheduler = - Executors.newScheduledThreadPool(1); - // we keep sending on loop because beam will only start consuming rows that were sent after startup. - scheduler.scheduleAtFixedRate(() -> send(row), 0, 1, TimeUnit.SECONDS); - - PCollection rows = pipeline.apply(new KafkaFeatureSource(importSpec)); - Assert.assertEquals(IsBounded.BOUNDED, rows.isBounded()); - PAssert.that(rows).containsInAnyOrder(row); - pipeline.run(); - } - - public Producer getProducer() { - Map producerProps = - KafkaTestUtils.producerProps(embeddedKafka.getEmbeddedKafka()); - return new DefaultKafkaProducerFactory<>(producerProps, new ByteArraySerializer(), - new FeatureRowSerializer()).createProducer(); - } - - - public static class FeatureRowSerializer implements Serializer { - - @Override - public void configure(Map configs, boolean isKey) { - - } - - @Override - public byte[] serialize(String topic, FeatureRow data) { - return data.toByteArray(); - } - - @Override - public void close() { - - } - } - - @Configuration - static class ContextConfiguration { - - @Bean - ProducerFactory producerFactory() { - Map producerProps = - KafkaTestUtils.producerProps(embeddedKafka.getEmbeddedKafka()); - - return new DefaultKafkaProducerFactory<>( - producerProps, new ByteArraySerializer(), new FeatureRowSerializer()); - } - - @Bean - KafkaTemplate kafkaTemplate() { - return new KafkaTemplate<>(producerFactory(), true); - } - } -} \ No newline at end of file diff --git a/ingestion/src/test/java/feast/source/pubsub/PubSubReadOptionsTest.java b/ingestion/src/test/java/feast/source/pubsub/PubSubReadOptionsTest.java deleted file mode 100644 index ece96942d3d..00000000000 --- a/ingestion/src/test/java/feast/source/pubsub/PubSubReadOptionsTest.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.source.pubsub; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; - -import feast.options.OptionsParser; -import feast.source.pubsub.PubSubFeatureSource.MessageFormat; -import feast.source.pubsub.PubSubFeatureSource.PubSubReadOptions; -import java.util.HashMap; -import java.util.Map; -import org.junit.Test; - -public class PubSubReadOptionsTest { - - @Test - public void testParseSubscription() { - Map opts = new HashMap<>(); - opts.put("subscription", "foo"); - - PubSubReadOptions options = OptionsParser.parse(opts, PubSubReadOptions.class); - assertFalse(options.discardUnknownFeatures); - assertEquals(options.messageFormat, MessageFormat.FEATURE_ROW); // default format - } - - @Test - public void testParseSubscriptionWithCsvFormat() { - Map opts = new HashMap<>(); - opts.put("subscription", "foo"); - opts.put("messageFormat", "csv"); - - PubSubReadOptions options = OptionsParser.parse(opts, PubSubReadOptions.class); - assertFalse(options.discardUnknownFeatures); - assertEquals(options.messageFormat, MessageFormat.CSV); - } - - @Test - public void testParseSubscriptionWithJsonFormat() { - Map opts = new HashMap<>(); - opts.put("subscription", "foo"); - opts.put("messageFormat", "json"); - - PubSubReadOptions options = OptionsParser.parse(opts, PubSubReadOptions.class); - assertFalse(options.discardUnknownFeatures); - assertEquals(options.messageFormat, MessageFormat.JSON); - } - - @Test - public void testParseSubscriptionWithFeatureRowFormat() { - Map opts = new HashMap<>(); - opts.put("subscription", "foo"); - opts.put("messageFormat", "featureRow"); - - PubSubReadOptions options = OptionsParser.parse(opts, PubSubReadOptions.class); - assertFalse(options.discardUnknownFeatures); - assertEquals(options.messageFormat, MessageFormat.FEATURE_ROW); - } -} diff --git a/ingestion/src/test/java/feast/store/FileStoreOptionsTest.java b/ingestion/src/test/java/feast/store/FileStoreOptionsTest.java deleted file mode 100644 index 0d8e06182c6..00000000000 --- a/ingestion/src/test/java/feast/store/FileStoreOptionsTest.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.store; - -import static org.junit.Assert.assertEquals; - -import feast.store.FileStoreOptions; -import org.joda.time.Duration; -import org.junit.Test; -import feast.options.OptionsParser; -import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; - -public class FileStoreOptionsTest { - - @Test - public void testOptions() { - FileStoreOptions options = - OptionsParser.parse( - ImmutableMap.builder().put("path", "/tmp/asdg").build(), - FileStoreOptions.class); - assertEquals("/tmp/asdg", options.path); - assertEquals(FileStoreOptions.DEFAULT_WINDOW_SIZE, options.getWindowDuration()); - } - - @Test(expected = IllegalArgumentException.class) - public void testOptionsInvalidWindowSize() { - FileStoreOptions options = - OptionsParser.parse( - ImmutableMap.builder() - .put("path", "/tmp/asdg") - .put("windowSize", "dsaf") - .build(), - FileStoreOptions.class); - } - - @Test - public void testOptionsValidWindowSize() { - FileStoreOptions options = - OptionsParser.parse( - ImmutableMap.builder() - .put("path", "/tmp/asdg") - .put("windowSize", "PT1H") - .build(), - FileStoreOptions.class); - assertEquals("/tmp/asdg", options.path); - assertEquals(Duration.standardHours(1), options.getWindowDuration()); - } -} diff --git a/ingestion/src/test/java/feast/store/MockFeatureErrorsFactory.java b/ingestion/src/test/java/feast/store/MockFeatureErrorsFactory.java deleted file mode 100644 index 49721147a60..00000000000 --- a/ingestion/src/test/java/feast/store/MockFeatureErrorsFactory.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store; - -import com.google.auto.service.AutoService; -import feast.store.errors.FeatureErrorsFactory; - -@AutoService(FeatureErrorsFactory.class) -public class MockFeatureErrorsFactory extends MockFeatureStore implements - FeatureErrorsFactory { - public static final String MOCK_ERRORS_STORE_TYPE = "errors.mock"; - - public MockFeatureErrorsFactory() { - super(MOCK_ERRORS_STORE_TYPE); - } -} diff --git a/ingestion/src/test/java/feast/store/MockFeatureStore.java b/ingestion/src/test/java/feast/store/MockFeatureStore.java deleted file mode 100644 index 7a5ebe8ee59..00000000000 --- a/ingestion/src/test/java/feast/store/MockFeatureStore.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.store; - -import feast.ingestion.model.Specs; -import feast.specs.StorageSpecProto.StorageSpec; -import lombok.Getter; - -public class MockFeatureStore implements FeatureStoreFactory { - - private final String type; - @Getter - private MockTransforms.Write write; - - public MockFeatureStore(String type) { - this.type = type; - } - - @Override - public MockTransforms.Write create(StorageSpec storageSpec, Specs specs) { - write = new MockTransforms.Write(storageSpec); - return write; - } - - @Override - public String getType() { - return type; - } -} diff --git a/ingestion/src/test/java/feast/store/MockServingFactory.java b/ingestion/src/test/java/feast/store/MockServingFactory.java deleted file mode 100644 index 125fdfdb79d..00000000000 --- a/ingestion/src/test/java/feast/store/MockServingFactory.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store; - -import com.google.auto.service.AutoService; -import feast.store.serving.FeatureServingFactory; - -@AutoService(FeatureServingFactory.class) -public class MockServingFactory extends MockFeatureStore implements - FeatureServingFactory { - - public static final String MOCK_SERVING_STORE_TYPE = "serving.mock"; - - public MockServingFactory() { - super(MOCK_SERVING_STORE_TYPE); - } -} diff --git a/ingestion/src/test/java/feast/store/MockTransforms.java b/ingestion/src/test/java/feast/store/MockTransforms.java deleted file mode 100644 index 7d241e77eed..00000000000 --- a/ingestion/src/test/java/feast/store/MockTransforms.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.store; - -import com.google.common.collect.Lists; -import feast.ingestion.transform.fn.Identity; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import java.util.List; -import lombok.Getter; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PDone; - -public class MockTransforms { - - @Getter - public static class Write extends FeatureStoreWrite { - - List> inputs = Lists.newArrayList(); - private StorageSpec spec; - - public Write() { - } - - public Write(StorageSpec spec) { - this.spec = spec; - } - - /** - * Keep input around for testing and apply identity because PTransforms have to do something, or - * the pipeline hangs at execution time - */ - @Override - public PDone expand(PCollection input) { - this.inputs.add(input.apply(getName(), ParDo.of(new Identity(getName())))); - return PDone.in(input.getPipeline()); - } - } -} diff --git a/ingestion/src/test/java/feast/store/MockWarehouseFactory.java b/ingestion/src/test/java/feast/store/MockWarehouseFactory.java deleted file mode 100644 index 05bf86ead98..00000000000 --- a/ingestion/src/test/java/feast/store/MockWarehouseFactory.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store; - -import com.google.auto.service.AutoService; -import feast.store.warehouse.FeatureWarehouseFactory; - -@AutoService(FeatureWarehouseFactory.class) -public class MockWarehouseFactory extends MockFeatureStore implements - FeatureWarehouseFactory { - - public static final String MOCK_WAREHOUSE_STORE_TYPE = "warehouse.mock"; - - public MockWarehouseFactory() { - super(MOCK_WAREHOUSE_STORE_TYPE); - } -} diff --git a/ingestion/src/test/java/feast/store/TextFileDynamicIOTest.java b/ingestion/src/test/java/feast/store/TextFileDynamicIOTest.java deleted file mode 100644 index 0a52b4bc788..00000000000 --- a/ingestion/src/test/java/feast/store/TextFileDynamicIOTest.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.store; - -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.junit.Assert.assertThat; - -import com.google.common.collect.Lists; -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; -import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; -import org.apache.beam.sdk.testing.TestPipeline; -import org.apache.beam.sdk.transforms.Create; -import org.apache.beam.sdk.values.KV; -import org.apache.beam.sdk.values.PCollection; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; - -@Slf4j -public class TextFileDynamicIOTest { - - @Rule - public TemporaryFolder folder = new TemporaryFolder(); - - @Rule - public TestPipeline testPipeline = TestPipeline.create(); - - @Test - public void testWrite() throws IOException { - File path = folder.newFolder(); - - FileStoreOptions options = new FileStoreOptions(); - options.jobName = "test-job"; - options.path = path.getAbsolutePath(); - - TextFileDynamicIO.Write write = - new TextFileDynamicIO.Write( - options, ".text"); - - PCollection> rowExtended = - testPipeline.apply(Create.of( - KV.of("part1", "line1"), - KV.of("part1", "line2"), - KV.of("part2", "line3"), - KV.of("part2", "line4"))); - rowExtended.apply(write); - - testPipeline.run(); - - List part1 = getAllLines(path.toPath().resolve("test-job/part1")); - List part2 = getAllLines(path.toPath().resolve("test-job/part2")); - assertThat(part1, containsInAnyOrder("line1", "line2")); - assertThat(part2, containsInAnyOrder("line3", "line4")); - } - - List getAllLines(Path path) throws IOException { - List files = Files.walk(path).collect(Collectors.toList()); - List lines = Lists.newArrayList(); - for (Path file : files) { - System.out.println(file); - if (file.toFile().isFile()) { - lines.addAll(Files.readAllLines(file)); - } - } - return lines; - } -} diff --git a/ingestion/src/test/java/feast/store/serving/bigtable/BigTableFeatureOptionsTest.java b/ingestion/src/test/java/feast/store/serving/bigtable/BigTableFeatureOptionsTest.java deleted file mode 100644 index c399174d1e7..00000000000 --- a/ingestion/src/test/java/feast/store/serving/bigtable/BigTableFeatureOptionsTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.store.serving.bigtable; - -import feast.options.OptionsParser; -import org.junit.Assert; -import org.junit.Test; -import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; - -public class BigTableFeatureOptionsTest { - - @Test - public void testParse() { - BigTableFeatureOptions options = - OptionsParser.parse( - ImmutableMap.builder().put("bigtable.family", "family1").build(), - BigTableFeatureOptions.class); - Assert.assertEquals("family1", options.family); - } - - @Test - public void testParseNoFamily() { - BigTableFeatureOptions options = - OptionsParser.parse( - ImmutableMap.builder().build(), - BigTableFeatureOptions.class); - Assert.assertEquals(null, options.family); - } -} diff --git a/ingestion/src/test/java/feast/store/serving/bigtable/BigTableStoreOptionsTest.java b/ingestion/src/test/java/feast/store/serving/bigtable/BigTableStoreOptionsTest.java deleted file mode 100644 index b9957a2b453..00000000000 --- a/ingestion/src/test/java/feast/store/serving/bigtable/BigTableStoreOptionsTest.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.serving.bigtable; - -import org.junit.Assert; -import org.junit.Test; -import feast.options.OptionsParser; -import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; - -public class BigTableStoreOptionsTest { - - @Test - public void testParse() { - BigTableStoreOptions options = - OptionsParser.parse( - ImmutableMap.builder() - .put("project", "project1") - .put("instance", "instance1") - .put("prefix", "prefix_") - .build(), - BigTableStoreOptions.class); - Assert.assertEquals("project1", options.project); - Assert.assertEquals("instance1", options.instance); - Assert.assertEquals("prefix_", options.prefix); - } - - @Test - public void testParseNoPrefix() { - BigTableStoreOptions options = - OptionsParser.parse( - ImmutableMap.builder() - .put("project", "project1") - .put("instance", "instance1") - .build(), - BigTableStoreOptions.class); - Assert.assertEquals("project1", options.project); - Assert.assertEquals("instance1", options.instance); - Assert.assertNull(options.prefix); - } - - @Test(expected = IllegalArgumentException.class) - public void testParseNoProject() { - OptionsParser.parse( - ImmutableMap.builder() - .put("instance", "instance1") - .put("prefix", "prefix_") - .build(), - BigTableStoreOptions.class); - } - - @Test(expected = IllegalArgumentException.class) - public void testParseNoInstance() { - OptionsParser.parse( - ImmutableMap.builder() - .put("project", "project1") - .put("prefix", "prefix_") - .build(), - BigTableStoreOptions.class); - } -} diff --git a/ingestion/src/test/java/feast/store/serving/redis/FeatureRowRedisIOWriteTest.java b/ingestion/src/test/java/feast/store/serving/redis/FeatureRowRedisIOWriteTest.java deleted file mode 100644 index e7e893fbf31..00000000000 --- 
a/ingestion/src/test/java/feast/store/serving/redis/FeatureRowRedisIOWriteTest.java +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.serving.redis; - -import static feast.store.serving.redis.FeatureRowToRedisMutationDoFn.getFeatureIdSha1Prefix; -import static feast.store.serving.redis.FeatureRowToRedisMutationDoFn.getRedisBucketKey; -import static org.junit.Assert.assertEquals; - -import com.google.common.io.Resources; -import com.google.protobuf.Timestamp; -import feast.ingestion.config.ImportJobSpecsSupplier; -import feast.ingestion.model.Features; -import feast.ingestion.model.Specs; -import feast.ingestion.model.Values; -import feast.ingestion.util.DateUtil; -import feast.specs.ImportJobSpecsProto.ImportJobSpecs; -import feast.specs.ImportSpecProto.Field; -import feast.specs.ImportSpecProto.ImportSpec; -import feast.specs.ImportSpecProto.Schema; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.storage.RedisProto.RedisBucketKey; -import feast.storage.RedisProto.RedisBucketValue; -import feast.store.FeatureStoreWrite; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import feast.types.FeatureRowProto.FeatureRow; -import java.io.IOException; -import java.nio.file.Path; -import java.nio.file.Paths; -import org.apache.beam.sdk.testing.TestPipeline; -import org.apache.beam.sdk.transforms.Create; -import org.apache.beam.sdk.values.PCollection; -import org.joda.time.DateTime; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.Test; -import redis.clients.jedis.Jedis; -import redis.embedded.Redis; -import redis.embedded.RedisServer; - -public class FeatureRowRedisIOWriteTest { - - private static final String featureInt32 = "testEntity.testInt32"; - private static final String featureString = "testEntity.testString"; - - private static int REDIS_PORT = 51234; - private static Redis redis; - private static Jedis jedis; - private static ImportJobSpecs importJobSpecs; - - @Rule - public TestPipeline testPipeline = TestPipeline.create(); - - @BeforeClass - public static void classSetup() throws IOException { - redis = new RedisServer(REDIS_PORT); - redis.start(); - Path path = Paths.get(Resources.getResource("specs/").getPath()); - importJobSpecs = new ImportJobSpecsSupplier(path.toString()).get(); - jedis = new Jedis("localhost", REDIS_PORT); - } - - @AfterClass - public static void teardown() { - redis.stop(); - } - - Specs getSpecs() { - Specs specs = Specs.of( - "test job", - importJobSpecs.toBuilder().setImportSpec(ImportSpec.newBuilder() - .addEntities("testEntity") - .setSchema( - Schema.newBuilder() - .addFields(Field.newBuilder().setFeatureId(featureInt32)) - .addFields(Field.newBuilder().setFeatureId(featureString))) - ).setServingStorageSpec(StorageSpec.newBuilder() - .setId("REDIS1").setType("redis") - .putOptions("port", String.valueOf(REDIS_PORT)) - .putOptions("host", "localhost") - 
.putOptions("batchSize", "1") - .putOptions("timeout", "2000") - .build()).build()); - specs.validate(); - return specs; - } - - @Test - public void testWrite() throws IOException { - - Specs specs = getSpecs(); - specs.validate(); - new RedisServingFactory().create(specs.getServingStorageSpec(), specs); - FeatureRowRedisIO.Write write = - new FeatureRowRedisIO.Write( - RedisStoreOptions.builder().host("localhost").port(REDIS_PORT).build(), specs); - - Timestamp now = DateUtil.toTimestamp(DateTime.now()); - - FeatureRowExtended rowExtended = - FeatureRowExtended.newBuilder() - .setRow( - FeatureRow.newBuilder() - .setEntityName("testEntity") - .setEntityKey("1") - .setEventTimestamp(now) - .addFeatures(Features.of(featureInt32, Values.ofInt32(1))) - .addFeatures(Features.of(featureString, Values.ofString("a")))) - .build(); - - PCollection input = testPipeline.apply(Create.of(rowExtended)); - - input.apply("write to embedded redis", write); - - testPipeline.run(); - - RedisBucketKey featureInt32Key = - getRedisBucketKey("1", getFeatureIdSha1Prefix(featureInt32), 0L); - RedisBucketKey featureStringKey = - getRedisBucketKey("1", getFeatureIdSha1Prefix(featureString), 0L); - - RedisBucketValue featureInt32Value = - RedisBucketValue.parseFrom(jedis.get(featureInt32Key.toByteArray())); - RedisBucketValue featureStringValue = - RedisBucketValue.parseFrom(jedis.get(featureStringKey.toByteArray())); - - assertEquals(Values.ofInt32(1), featureInt32Value.getValue()); - assertEquals(now, featureInt32Value.getEventTimestamp()); - assertEquals(Values.ofString("a"), featureStringValue.getValue()); - assertEquals(now, featureStringValue.getEventTimestamp()); - } - - @Test - public void testWriteFromOptions() throws IOException { - Specs specs = getSpecs(); - FeatureStoreWrite write = new RedisServingFactory() - .create(specs.getServingStorageSpec(), specs); - - Timestamp now = DateUtil.toTimestamp(DateTime.now()); - FeatureRowExtended rowExtended = - FeatureRowExtended.newBuilder() - .setRow( - FeatureRow.newBuilder() - .setEntityName("testEntity") - .setEntityKey("1") - .setEventTimestamp(now) - .addFeatures(Features.of(featureInt32, Values.ofInt32(1))) - .addFeatures(Features.of(featureString, Values.ofString("a")))) - .build(); - - PCollection input = testPipeline.apply(Create.of(rowExtended)); - - input.apply("write to embedded redis", write); - - testPipeline.run(); - - RedisBucketKey featureInt32Key = - getRedisBucketKey("1", getFeatureIdSha1Prefix(featureInt32), 0L); - RedisBucketKey featureStringKey = - getRedisBucketKey("1", getFeatureIdSha1Prefix(featureString), 0L); - - RedisBucketValue featureInt32Value = - RedisBucketValue.parseFrom(jedis.get(featureInt32Key.toByteArray())); - RedisBucketValue featureStringValue = - RedisBucketValue.parseFrom(jedis.get(featureStringKey.toByteArray())); - - assertEquals(Values.ofInt32(1), featureInt32Value.getValue()); - assertEquals(now, featureInt32Value.getEventTimestamp()); - assertEquals(Values.ofString("a"), featureStringValue.getValue()); - assertEquals(now, featureStringValue.getEventTimestamp()); - } -} diff --git a/ingestion/src/test/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFnTest.java b/ingestion/src/test/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFnTest.java deleted file mode 100644 index 64bc4014268..00000000000 --- a/ingestion/src/test/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFnTest.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache 
License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.serving.redis; - -import feast.ingestion.util.DateUtil; -import feast.storage.RedisProto.RedisBucketKey; -import org.apache.beam.sdk.testing.NeedsRunner; -import org.apache.beam.sdk.testing.TestPipeline; -import org.apache.commons.codec.digest.DigestUtils; -import org.joda.time.DateTime; -import org.joda.time.Duration; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; - -public class FeatureRowToRedisMutationDoFnTest { - - @Rule - public final transient TestPipeline pipeline = TestPipeline.create(); - - @Test - public void testRedisBucketKeySize() { - Duration bucketSize = Duration.standardMinutes(20); - DateTime dt = new DateTime(2018, 6, 22, 4, 15, 26); - int bucketId = - Math.toIntExact(DateUtil.toTimestamp(dt).getSeconds() / bucketSize.getStandardSeconds()); - String featureIdHash = DigestUtils.sha1Hex("driver.seconds.pings_v1").substring(0, 7); - RedisBucketKey key = - RedisBucketKey.newBuilder() - .setEntityKey(String.valueOf(12345678)) - .setFeatureIdSha1Prefix(featureIdHash) - .setBucketId(bucketId) - .build(); - System.out.println(key.toByteArray().length); - } - - @Test - @Category(NeedsRunner.class) - public void testOutputMutationPerFeature() { - // TODO - } -} diff --git a/ingestion/src/test/java/feast/store/serving/redis/RedisCustomIOTest.java b/ingestion/src/test/java/feast/store/serving/redis/RedisCustomIOTest.java new file mode 100644 index 00000000000..d0c87434933 --- /dev/null +++ b/ingestion/src/test/java/feast/store/serving/redis/RedisCustomIOTest.java @@ -0,0 +1,79 @@ +package feast.store.serving.redis; + +import static feast.test.TestUtil.field; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.MatcherAssert.assertThat; + +import feast.storage.RedisProto.RedisKey; +import feast.store.serving.redis.RedisCustomIO.Method; +import feast.store.serving.redis.RedisCustomIO.RedisMutation; +import feast.types.FeatureRowProto.FeatureRow; +import feast.types.ValueProto.ValueType.Enum; +import java.io.IOException; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.stream.Collectors; +import org.apache.beam.sdk.testing.TestPipeline; +import org.apache.beam.sdk.transforms.Create; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import redis.clients.jedis.Jedis; +import redis.embedded.Redis; +import redis.embedded.RedisServer; + +public class RedisCustomIOTest { + + @Rule + public transient TestPipeline p = TestPipeline.create(); + + private static int REDIS_PORT = 51234; + private static Redis redis; + private static Jedis jedis; + + + @BeforeClass + public static void setUp() throws IOException { + redis = new RedisServer(REDIS_PORT); + redis.start(); + jedis = new Jedis("localhost", REDIS_PORT); + } + + @AfterClass + public static void teardown() { + redis.stop(); + } + + @Test + public void shouldWriteToRedis() { + HashMap kvs = new 
LinkedHashMap<>(); + kvs.put(RedisKey.newBuilder().setFeatureSet("fs:1") + .addEntities(field("entity", 1, Enum.INT64)).build(), + FeatureRow.newBuilder().setFeatureSet("fs:1") + .addFields(field("entity", 1, Enum.INT64)) + .addFields(field("feature", "one", Enum.STRING)).build()); + kvs.put(RedisKey.newBuilder().setFeatureSet("fs:1") + .addEntities(field("entity", 2, Enum.INT64)).build(), + FeatureRow.newBuilder().setFeatureSet("fs:1") + .addFields(field("entity", 2, Enum.INT64)) + .addFields(field("feature", "two", Enum.STRING)).build()); + + List featureRowWrites = kvs.entrySet().stream() + .map(kv -> new RedisMutation(Method.SET, kv.getKey().toByteArray(), + kv.getValue().toByteArray(), + null, null) + ) + .collect(Collectors.toList()); + + p.apply(Create.of(featureRowWrites)) + .apply(RedisCustomIO.write("localhost", REDIS_PORT)); + p.run(); + + kvs.forEach((key, value) -> { + byte[] actual = jedis.get(key.toByteArray()); + assertThat(actual, equalTo(value.toByteArray())); + }); + } +} \ No newline at end of file diff --git a/ingestion/src/test/java/feast/store/serving/redis/RedisFeatureOptionsTest.java b/ingestion/src/test/java/feast/store/serving/redis/RedisFeatureOptionsTest.java deleted file mode 100644 index 4f6246d1c4a..00000000000 --- a/ingestion/src/test/java/feast/store/serving/redis/RedisFeatureOptionsTest.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.serving.redis; - -import org.joda.time.Duration; -import org.junit.Assert; -import org.junit.Test; -import feast.options.OptionsParser; -import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; - -public class RedisFeatureOptionsTest { - - @Test - public void testParse() { - RedisFeatureOptions options = - OptionsParser.parse( - ImmutableMap.builder() - .put("redis.expiry", "PT6H") - .build(), - RedisFeatureOptions.class); - Assert.assertEquals("PT6H", options.expiry); - Assert.assertEquals(Duration.standardHours(6), options.getExpiryDuration()); - } - - - @Test - public void testParseNoExpiry() { - RedisFeatureOptions options = - OptionsParser.parse( - ImmutableMap.builder() - .build(), - RedisFeatureOptions.class); - Assert.assertEquals(RedisFeatureOptions.DEFAULT_EXPIRY, options.expiry); - Assert.assertEquals(Duration.ZERO, options.getExpiryDuration()); - } -} diff --git a/ingestion/src/test/java/feast/store/serving/redis/RedisStoreOptionsTest.java b/ingestion/src/test/java/feast/store/serving/redis/RedisStoreOptionsTest.java deleted file mode 100644 index 335a3b1fedf..00000000000 --- a/ingestion/src/test/java/feast/store/serving/redis/RedisStoreOptionsTest.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.serving.redis; - -import static org.junit.Assert.assertEquals; - -import feast.options.OptionsParser; -import java.util.HashMap; -import java.util.Map; -import org.junit.Test; - -public class RedisStoreOptionsTest { - - @Test - public void testParse() { - Map map = new HashMap<>(); - map.put("host", "localhost"); - map.put("port", "1234"); - - RedisStoreOptions opts = OptionsParser.parse(map, RedisStoreOptions.class); - assertEquals("localhost", opts.host); - assertEquals(1234, (int) opts.port); - } - - @Test(expected = IllegalArgumentException.class) - public void testParseHost() { - Map map = new HashMap<>(); - map.put("port", "1234"); - OptionsParser.parse(map, RedisStoreOptions.class); - } - - @Test(expected = IllegalArgumentException.class) - public void testParseNoPort() { - Map map = new HashMap<>(); - map.put("host", "localhost"); - OptionsParser.parse(map, RedisStoreOptions.class); - } -} diff --git a/ingestion/src/test/java/feast/store/warehouse/bigquery/BigQueryStoreOptionsTest.java b/ingestion/src/test/java/feast/store/warehouse/bigquery/BigQueryStoreOptionsTest.java deleted file mode 100644 index 4563d3e6fb9..00000000000 --- a/ingestion/src/test/java/feast/store/warehouse/bigquery/BigQueryStoreOptionsTest.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.store.warehouse.bigquery; - -import org.junit.Assert; -import org.junit.Test; -import feast.options.OptionsParser; -import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; - -public class BigQueryStoreOptionsTest { - - @Test - public void testParse() { - BigQueryStoreOptions options = - OptionsParser.parse( - ImmutableMap.builder() - .put("project", "project1") - .put("dataset", "dataset1") - .put("tempLocation", "/tmp/foobar") - .build(), - BigQueryStoreOptions.class); - - Assert.assertEquals("project1", options.project); - Assert.assertEquals("dataset1", options.dataset); - Assert.assertEquals("/tmp/foobar", options.tempLocation); - } - - @Test - public void testParseNoTempLocation() { - BigQueryStoreOptions options = - OptionsParser.parse( - ImmutableMap.builder() - .put("project", "project1") - .put("dataset", "dataset1") - .build(), - BigQueryStoreOptions.class); - - Assert.assertEquals("project1", options.project); - Assert.assertEquals("dataset1", options.dataset); - Assert.assertNull(options.tempLocation); - } -} diff --git a/ingestion/src/test/java/feast/store/warehouse/bigquery/ValueBigQueryBuilderTest.java b/ingestion/src/test/java/feast/store/warehouse/bigquery/ValueBigQueryBuilderTest.java deleted file mode 100644 index 648fa5f151e..00000000000 --- a/ingestion/src/test/java/feast/store/warehouse/bigquery/ValueBigQueryBuilderTest.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.store.warehouse.bigquery; - -import static junit.framework.TestCase.assertEquals; -import static junit.framework.TestCase.assertTrue; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; - -import com.google.protobuf.ByteString; -import com.google.protobuf.Timestamp; -import java.nio.charset.StandardCharsets; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; -import feast.ingestion.util.DateUtil; -import feast.types.ValueProto.Value; - -public class ValueBigQueryBuilderTest { - - @Rule public TemporaryFolder folder = new TemporaryFolder(); - - @Test - public void testINT64IsConvertedToLong() { - Value value = Value.newBuilder().setInt64Val(1234L).build(); - Object actual = ValueBigQueryBuilder.bigQueryObjectOf(value); - assertTrue(Long.class.isInstance(actual)); - assertEquals(1234L, actual); - } - - @Test - public void testINT32IsConvertedToLong() { - Value value = Value.newBuilder().setInt32Val(1234).build(); - Object actual = ValueBigQueryBuilder.bigQueryObjectOf(value); - assertTrue(Long.class.isInstance(actual)); - assertEquals(1234L, actual); - } - - @Test - public void testSTRINGIsConvertedToString() { - Value value = Value.newBuilder().setStringVal("abcd").build(); - Object actual = ValueBigQueryBuilder.bigQueryObjectOf(value); - assertTrue(String.class.isInstance(actual)); - assertEquals("abcd", actual); - } - - @Test - public void testTIMESTAMPIsConvertedToString() { - Timestamp timestamp = DateUtil.toTimestamp("2017-01-01T13:14:15.123Z"); - Value value = Value.newBuilder().setTimestampVal(timestamp).build(); - Object actual = ValueBigQueryBuilder.bigQueryObjectOf(value); - assertTrue(String.class.isInstance(actual)); - assertEquals("2017-01-01T13:14:15.123Z", actual); - } - - @Test - public void testBOOLIsConvertedToString() { - Value value = Value.newBuilder().setBoolVal(true).build(); - Object actual = ValueBigQueryBuilder.bigQueryObjectOf(value); - assertTrue(Boolean.class.isInstance(actual)); - assertEquals(true, actual); - } - - @Test - public void testDOUBLEIsConvertedToDouble() { - Value value = Value.newBuilder().setDoubleVal(1.123456767890123456789).build(); - Object actual = ValueBigQueryBuilder.bigQueryObjectOf(value); - assertTrue(Double.class.isInstance(actual)); - assertEquals(1.123456767890123456789, actual); - } - - @Test - public void testFLOATIsConvertedToDouble() { - Value value = Value.newBuilder().setFloatVal(1.123456767890123456789f).build(); - Object actual = ValueBigQueryBuilder.bigQueryObjectOf(value); - assertTrue(Double.class.isInstance(actual)); - assertEquals(1.1234567f, ((Double) actual).floatValue()); - } - - @Test - public void testBYTESIsConvertedToByteBuffer() { - Value value = - Value.newBuilder() - .setBytesVal(ByteString.copyFrom("to bytes!".getBytes(StandardCharsets.UTF_8))) - .build(); - Object actual = ValueBigQueryBuilder.bigQueryObjectOf(value); - assertTrue(byte[].class.isInstance(actual)); - assertThat("to bytes!".getBytes(StandardCharsets.UTF_8), is(equalTo(actual))); - } -} diff --git a/ingestion/src/test/java/feast/store/warehouse/json/JsonFileFeatureIOTest.java b/ingestion/src/test/java/feast/store/warehouse/json/JsonFileFeatureIOTest.java deleted file mode 100644 index 97e09f9e7f3..00000000000 --- a/ingestion/src/test/java/feast/store/warehouse/json/JsonFileFeatureIOTest.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache 
License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.store.warehouse.json; - -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.junit.Assert.assertThat; - -import com.google.common.collect.Lists; -import feast.store.FileStoreOptions; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import feast.types.FeatureRowProto.FeatureRow; -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; -import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; -import org.apache.beam.sdk.testing.TestPipeline; -import org.apache.beam.sdk.transforms.Create; -import org.apache.beam.sdk.values.PCollection; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; - -@Slf4j -public class JsonFileFeatureIOTest { - - @Rule - public TemporaryFolder folder = new TemporaryFolder(); - - @Rule - public TestPipeline testPipeline = TestPipeline.create(); - - @Test - public void testWrite() throws IOException { - File path = folder.newFolder(); - - FileStoreOptions options = new FileStoreOptions(); - options.jobName = "test-job"; - options.path = path.getAbsolutePath(); - - JsonFileWarehouseWrite write = new JsonFileWarehouseWrite(options); - - PCollection rowExtended = - testPipeline.apply( - Create.of( - FeatureRowExtended.newBuilder() - .setRow( - FeatureRow.newBuilder().setEntityName("testEntity1").setEntityKey("1234")) - .build(), - FeatureRowExtended.newBuilder() - .setRow( - FeatureRow.newBuilder().setEntityName("testEntity2").setEntityKey("1234")) - .build())); - rowExtended.apply(write); - - testPipeline.run(); - - for (Path p : Files.walk(path.toPath()).collect(Collectors.toList())) { - if (p.toFile().isFile()) { - log.debug(p.toString()); - } - } - List part1 = getAllLines(path.toPath().resolve("test-job/testEntity1")); - List part2 = getAllLines(path.toPath().resolve("test-job/testEntity2")); - assertThat(part1, - containsInAnyOrder("{\"entityKey\":\"1234\",\"entityName\":\"testEntity1\"}")); - assertThat(part2, - containsInAnyOrder("{\"entityKey\":\"1234\",\"entityName\":\"testEntity2\"}")); - } - - List getAllLines(Path path) throws IOException { - List files = Files.walk(path).collect(Collectors.toList()); - List lines = Lists.newArrayList(); - for (Path file : files) { - System.out.println(file); - if (file.toFile().isFile()) { - lines.addAll(Files.readAllLines(file)); - } - } - return lines; - } -} diff --git a/ingestion/src/test/java/feast/test/TestUtil.java b/ingestion/src/test/java/feast/test/TestUtil.java new file mode 100644 index 00000000000..b0fa4cbd10f --- /dev/null +++ b/ingestion/src/test/java/feast/test/TestUtil.java @@ -0,0 +1,308 @@ +package feast.test; + +import com.google.protobuf.ByteString; +import com.google.protobuf.util.Timestamps; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.storage.RedisProto.RedisKey; +import feast.types.FeatureRowProto.FeatureRow; +import feast.types.FeatureRowProto.FeatureRow.Builder; +import 
feast.types.FieldProto.Field; +import feast.types.ValueProto.BoolList; +import feast.types.ValueProto.BytesList; +import feast.types.ValueProto.DoubleList; +import feast.types.ValueProto.FloatList; +import feast.types.ValueProto.Int32List; +import feast.types.ValueProto.Int64List; +import feast.types.ValueProto.StringList; +import feast.types.ValueProto.Value; +import feast.types.ValueProto.ValueType; +import java.io.IOException; +import java.util.List; +import java.util.Properties; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import kafka.server.KafkaConfig; +import kafka.server.KafkaServerStartable; +import org.apache.commons.lang3.RandomStringUtils; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.serialization.LongSerializer; +import org.apache.zookeeper.server.ServerConfig; +import org.apache.zookeeper.server.ZooKeeperServerMain; +import redis.embedded.RedisServer; + +@SuppressWarnings("WeakerAccess") +public class TestUtil { + + public static class LocalRedis { + + private static RedisServer server; + + /** + * Start local Redis for used in testing at "localhost" + * + * @param port port number + * @throws IOException if Redis failed to start + */ + public static void start(int port) throws IOException { + server = new RedisServer(port); + server.start(); + } + + public static void stop() { + if (server != null) { + server.stop(); + } + } + } + + public static class LocalKafka { + + private static KafkaServerStartable server; + + /** + * Start local Kafka and (optionally) Zookeeper + * + * @param kafkaHost e.g. localhost + * @param kafkaPort e.g. 60001 + * @param kafkaReplicationFactor e.g. 1 + * @param zookeeperHost e.g. localhost + * @param zookeeperPort e.g. 60002 + * @param zookeeperDataDir e.g. "/tmp" or "Files.createTempDir().getAbsolutePath()" + */ + public static void start(String kafkaHost, int kafkaPort, short kafkaReplicationFactor, + boolean startZookeper, String zookeeperHost, int zookeeperPort, String zookeeperDataDir) + throws InterruptedException { + if (startZookeper) { + LocalZookeeper.start(zookeeperPort, zookeeperDataDir); + Thread.sleep(5000); + } + Properties kafkaProp = new Properties(); + kafkaProp.put("zookeeper.connect", zookeeperHost + ":" + zookeeperPort); + kafkaProp.put("host.name", kafkaHost); + kafkaProp.put("port", kafkaPort); + kafkaProp.put("offsets.topic.replication.factor", kafkaReplicationFactor); + KafkaConfig kafkaConfig = new KafkaConfig(kafkaProp); + server = new KafkaServerStartable(kafkaConfig); + new Thread(server::startup).start(); + } + + public static void stop() { + if (server != null) { + try { + server.shutdown(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } + + /** + * Publish test Feature Row messages to a running Kafka broker + * + * @param bootstrapServers e.g. localhost:9092 + * @param topic e.g. my_topic + * @param messages e.g. 
list of Feature Row + * @param valueSerializer in Feast this valueSerializer should be "ByteArraySerializer.class" + * @param publishTimeoutSec duration to wait for publish operation (of each message) to succeed + */ + public static void publishFeatureRowsToKafka(String bootstrapServers, String topic, + List messages, Class valueSerializer, long publishTimeoutSec) { + Long defaultKey = 1L; + Properties prop = new Properties(); + prop.put("bootstrap.servers", bootstrapServers); + prop.put("key.serializer", LongSerializer.class); + prop.put("value.serializer", valueSerializer); + Producer producer = new KafkaProducer<>(prop); + + messages.forEach(featureRow -> { + ProducerRecord record = new ProducerRecord<>(topic, defaultKey, + featureRow.toByteArray()); + try { + producer.send(record).get(publishTimeoutSec, TimeUnit.SECONDS); + } catch (InterruptedException | ExecutionException | TimeoutException e) { + e.printStackTrace(); + } + }); + } + + /** + * Create a Feature Row with random value according to the FeatureSetSpec + * + *
<p>
See {@link #createRandomFeatureRow(FeatureSetSpec, int)} + */ + public static FeatureRow createRandomFeatureRow(FeatureSetSpec spec) { + ThreadLocalRandom random = ThreadLocalRandom.current(); + int randomStringSizeMaxSize = 12; + return createRandomFeatureRow(spec, random.nextInt(0, randomStringSizeMaxSize) + 4); + } + + /** + * Create a Feature Row with random value according to the FeatureSetSpec. + * + *
<p>
The Feature Row created contains fields according to the entities and features + * defined in FeatureSetSpec, matching the value type of the field, with randomized value for + * testing. + * + * @param spec {@link FeatureSetSpec} + * @param randomStringSize number of characters for the generated random string + * @return {@link FeatureRow} + */ + public static FeatureRow createRandomFeatureRow(FeatureSetSpec spec, int randomStringSize) { + Builder builder = FeatureRow.newBuilder() + .setFeatureSet(spec.getName() + ":" + spec.getVersion()) + .setEventTimestamp(Timestamps.fromMillis(System.currentTimeMillis())); + + spec.getEntitiesList().forEach(field -> { + builder.addFields(Field.newBuilder() + .setName(field.getName()) + .setValue(createRandomValue(field.getValueType(), randomStringSize)) + .build()); + }); + + spec.getFeaturesList().forEach(field -> { + builder.addFields(Field.newBuilder() + .setName(field.getName()) + .setValue(createRandomValue(field.getValueType(), randomStringSize)) + .build()); + }); + + return builder.build(); + } + + /** + * Create a random Feast {@link Value} of {@link ValueType.Enum}. + * + * @param type {@link ValueType.Enum} + * @param randomStringSize number of characters for the generated random string + * @return {@link Value} + */ + public static Value createRandomValue(ValueType.Enum type, int randomStringSize) { + Value.Builder builder = Value.newBuilder(); + ThreadLocalRandom random = ThreadLocalRandom.current(); + + switch (type) { + case INVALID: + case UNRECOGNIZED: + throw new IllegalArgumentException("Invalid ValueType: " + type); + case BYTES: + builder.setBytesVal( + ByteString.copyFrom(RandomStringUtils.randomAlphanumeric(randomStringSize).getBytes())); + break; + case STRING: + builder.setStringVal(RandomStringUtils.randomAlphanumeric(randomStringSize)); + break; + case INT32: + builder.setInt32Val(random.nextInt()); + break; + case INT64: + builder.setInt64Val(random.nextLong()); + break; + case DOUBLE: + builder.setDoubleVal(random.nextDouble()); + break; + case FLOAT: + builder.setFloatVal(random.nextFloat()); + break; + case BOOL: + builder.setBoolVal(random.nextBoolean()); + break; + case BYTES_LIST: + builder.setBytesListVal(BytesList.newBuilder() + .addVal(ByteString + .copyFrom(RandomStringUtils.randomAlphanumeric(randomStringSize).getBytes())) + .build()); + break; + case STRING_LIST: + builder.setStringListVal( + StringList.newBuilder().addVal(RandomStringUtils.randomAlphanumeric(randomStringSize)) + .build()); + break; + case INT32_LIST: + builder.setInt32ListVal(Int32List.newBuilder().addVal(random.nextInt()).build()); + break; + case INT64_LIST: + builder.setInt64ListVal(Int64List.newBuilder().addVal(random.nextLong()).build()); + break; + case DOUBLE_LIST: + builder.setDoubleListVal(DoubleList.newBuilder().addVal(random.nextDouble()).build()); + break; + case FLOAT_LIST: + builder.setFloatListVal(FloatList.newBuilder().addVal(random.nextFloat()).build()); + break; + case BOOL_LIST: + builder.setBoolListVal(BoolList.newBuilder().addVal(random.nextBoolean()).build()); + break; + } + return builder.build(); + } + + /** + * Create {@link RedisKey} from {@link FeatureSetSpec} and {@link FeatureRow}. + * + *
<p>
The entities in the created {@link RedisKey} will contain the value with matching + * field name in the {@link FeatureRow} + * + * @param spec {@link FeatureSetSpec} + * @param row {@link FeatureSetSpec} + * @return {@link RedisKey} + */ + public static RedisKey createRedisKey(FeatureSetSpec spec, FeatureRow row) { + RedisKey.Builder builder = RedisKey.newBuilder() + .setFeatureSet(spec.getName() + ":" + spec.getVersion()); + spec.getEntitiesList().forEach(entityField -> row.getFieldsList().stream() + .filter(rowField -> rowField.getName().equals(entityField.getName())).findFirst() + .ifPresent( + builder::addEntities)); + return builder.build(); + } + + private static class LocalZookeeper { + + static void start(int zookeeperPort, String zookeeperDataDir) { + final ZooKeeperServerMain zookeeper = new ZooKeeperServerMain(); + final ServerConfig serverConfig = new ServerConfig(); + serverConfig.parse(new String[]{String.valueOf(zookeeperPort), zookeeperDataDir}); + new Thread( + () -> { + try { + zookeeper.runFromConfig(serverConfig); + } catch (IOException e) { + e.printStackTrace(); + } + }) + .start(); + } + } + + /** + * Create a field object with given name and type. + * + * @param name of the field. + * @param value of the field. Should be compatible with the valuetype given. + * @param valueType type of the field. + * @return Field object + */ + public static Field field(String name, Object value, ValueType.Enum valueType) { + Field.Builder fieldBuilder = Field.newBuilder() + .setName(name); + switch (valueType) { + case INT32: + return fieldBuilder.setValue(Value.newBuilder().setInt32Val((int) value)).build(); + case INT64: + return fieldBuilder.setValue(Value.newBuilder().setInt64Val((int) value)).build(); + case FLOAT: + return fieldBuilder.setValue(Value.newBuilder().setFloatVal((float) value)).build(); + case DOUBLE: + return fieldBuilder.setValue(Value.newBuilder().setDoubleVal((double) value)).build(); + case STRING: + return fieldBuilder.setValue(Value.newBuilder().setStringVal((String) value)).build(); + default: + throw new IllegalStateException("Unexpected valueType: " + value.getClass()); + } + } +} diff --git a/ingestion/src/test/resources/import-job-specs/invalid-empty.yaml b/ingestion/src/test/resources/import-job-specs/invalid-empty.yaml new file mode 100644 index 00000000000..7a1bbc96078 --- /dev/null +++ b/ingestion/src/test/resources/import-job-specs/invalid-empty.yaml @@ -0,0 +1,2 @@ + + \ No newline at end of file diff --git a/ingestion/src/test/resources/import-job-specs/invalid-source-spec-1.yaml b/ingestion/src/test/resources/import-job-specs/invalid-source-spec-1.yaml new file mode 100644 index 00000000000..26c1ff28e51 --- /dev/null +++ b/ingestion/src/test/resources/import-job-specs/invalid-source-spec-1.yaml @@ -0,0 +1,34 @@ +sourceSpec: + type: NON_EXISTENT_TYPE + options: + bootstrapServers: localhost:9092 + topics: topic1 + +entitySpec: + name: entity1 + description: description for entity1 + +featureSpecs: +- id: entity1.feature1 + name: feature1 + valueType: INT64 + entity: entity1 +- id: entity1.feature2 + name: feature2 + valueType: DOUBLE + entity: entity1 +- id: entity1.feature3 + name: feature3 + valueType: TIMESTAMP + entity: entity1 +- id: entity1.feature4 + name: feature4 + valueType: DOUBLE + entity: entity1 + +sinkStorageSpec: + id: storage1 + type: BIGQUERY + options: + datasetId: dataset1 + projectId: project1 diff --git a/ingestion/src/test/resources/import-job-specs/valid-1.yaml b/ingestion/src/test/resources/import-job-specs/valid-1.yaml 
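The TestUtil helpers above are intended to be composed in ingestion tests. A minimal usage sketch, assuming a FeatureSetSpec named spec is already defined; the topic name is illustrative, and ByteArraySerializer is the value serializer recommended in the publishFeatureRowsToKafka javadoc:

// Generate a FeatureRow with random values conforming to the feature set spec.
FeatureRow row = TestUtil.createRandomFeatureRow(spec);

// Derive the RedisKey for that row; entity values are copied from the row's fields.
RedisKey key = TestUtil.createRedisKey(spec, row);

// Publish the row to a broker started via TestUtil.LocalKafka.start(...).
TestUtil.publishFeatureRowsToKafka(
    "localhost:9092",                // bootstrap servers
    "feast-test-topic",              // topic (illustrative)
    Collections.singletonList(row),  // messages
    ByteArraySerializer.class,       // value serializer
    10);                             // publish timeout in seconds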
new file mode 100644 index 00000000000..80eaa0c5e81 --- /dev/null +++ b/ingestion/src/test/resources/import-job-specs/valid-1.yaml @@ -0,0 +1,34 @@ +sourceSpec: + type: KAFKA + options: + bootstrapServers: localhost:9092 + topics: topic1 + +entitySpec: + name: entity1 + description: description for entity1 + +featureSpecs: +- id: entity1.feature1 + name: feature1 + valueType: INT64 + entity: entity1 +- id: entity1.feature2 + name: feature2 + valueType: DOUBLE + entity: entity1 +- id: entity1.feature3 + name: feature3 + valueType: TIMESTAMP + entity: entity1 +- id: entity1.feature4 + name: feature4 + valueType: DOUBLE + entity: entity1 + +sinkStorageSpec: + id: storage1 + type: BIGQUERY + options: + datasetId: dataset1 + projectId: project1 diff --git a/ingestion/src/test/resources/import-job-specs/valid-2.yaml b/ingestion/src/test/resources/import-job-specs/valid-2.yaml new file mode 100644 index 00000000000..cc8762d424f --- /dev/null +++ b/ingestion/src/test/resources/import-job-specs/valid-2.yaml @@ -0,0 +1,33 @@ +sourceSpec: + type: KAFKA + options: + bootstrapServers: localhost:9092 + topics: topic1 + +entitySpec: + name: entity1 + description: description for entity1 + +featureSpecs: +- id: entity1.feature1 + name: feature1 + valueType: INT64 + entity: entity1 +- id: entity1.feature2 + name: feature2 + valueType: DOUBLE + entity: entity1 +- id: entity1.feature3 + name: feature3 + valueType: TIMESTAMP + entity: entity1 +- id: entity1.feature4 + name: feature4 + valueType: DOUBLE + entity: entity1 + +sinkStorageSpec: + id: storage1 + type: REDIS + options: + host: localhost diff --git a/ingestion/src/test/resources/logback-test.xml b/ingestion/src/test/resources/logback-test.xml index 9a301f3bc63..14fa4086115 100644 --- a/ingestion/src/test/resources/logback-test.xml +++ b/ingestion/src/test/resources/logback-test.xml @@ -16,13 +16,20 @@ --> - + + + + + + + %d{HH:mm:ss} [%thread] %-5level %logger{50} - %msg%n + - \ No newline at end of file + diff --git a/ingestion/src/test/resources/specs/importJobSpecs.yaml b/ingestion/src/test/resources/specs/importJobSpecs.yaml index 74fd5745644..6f52cb30aae 100644 --- a/ingestion/src/test/resources/specs/importJobSpecs.yaml +++ b/ingestion/src/test/resources/specs/importJobSpecs.yaml @@ -1,20 +1,16 @@ -importSpec: {} -servingStorageSpec: +sourceSpec: {} +sinkStorageSpec: id: TEST_SERVING type: serving.mock options: {} -warehouseStorageSpec: - id: TEST_WAREHOUSE - type: warehouse.mock - options: {} errorsStorageSpec: id: errors type: errors.mock options: {} -entitySpecs: - - name: testEntity - description: This is a test entity - tags: [] +entitySpec: + name: testEntity + description: This is a test entity + tags: [] featureSpecs: - id: testEntity.testInt64 entity: testEntity diff --git a/pom.xml b/pom.xml index 8afd15d0a5d..78514288640 100644 --- a/pom.xml +++ b/pom.xml @@ -17,22 +17,72 @@ 4.0.0 + + Feast + Feature Store for Machine Learning + ${github.url} + feast feast-parent ${revision} pom - Feast Parent + + + ingestion + core + serving + sdk/java + - 0.1.1 + 0.3.0-SNAPSHOT + https://github.com/gojek/feast + UTF-8 - 3.6.1 - 1.14.0 - 3.6.1 - 2.0.4.RELEASE + UTF-8 + + 1.17.1 + 3.10.0 + 3.10.0 + 2.0.9.RELEASE + 2.16.0 + 1.91.0 + + 1.9.10 + 1.3 + 2.3.0 + 2.28.2 + + 0.21.0 + + Gojek + https://www.gojek.io/ + + + + + Apache License, Version 2.0 + https://www.apache.org/licenses/LICENSE-2.0.txt + repo + + + + + ${github.url} + scm:git:${github.url}.git + scm:git:git@github.com:gojek/feast.git + HEAD + + + + GitHub Issues + ${github.url}/issues + + + 
feast-snapshot file:///tmp/snapshot @@ -43,45 +93,276 @@ - - ingestion - core - serving - + + + + + com.google.cloud + google-cloud-bigquery + ${com.google.cloud.version} + + + com.google.cloud + google-cloud-storage + ${com.google.cloud.version} + + + + + com.google.cloud + google-cloud-nio + 0.83.0-alpha + + + + io.opencensus + opencensus-api + ${opencensus.version} + + + io.opencensus + opencensus-contrib-grpc-util + ${opencensus.version} + + + io.opencensus + opencensus-contrib-http-util + ${opencensus.version} + + + + + io.grpc + grpc-netty + ${grpcVersion} + + + io.grpc + grpc-netty-shaded + ${grpcVersion} + + + io.grpc + grpc-protobuf + ${grpcVersion} + + + io.grpc + grpc-services + ${grpcVersion} + + + io.grpc + grpc-stub + ${grpcVersion} + + + io.grpc + grpc-testing + ${grpcVersion} + test + + + + + io.github.lognet + grpc-spring-boot-starter + 3.0.2 + + + + + com.datadoghq + java-dogstatsd-client + 2.6.1 + + + com.google.guava + guava + 26.0-jre + + + com.google.protobuf + protobuf-java + ${protobufVersion} + + + com.google.protobuf + protobuf-java-util + ${protobufVersion} + + + org.projectlombok + lombok + 1.18.2 + provided + + + + + com.github.kstyrc + embedded-redis + 0.6 + test + + + + + org.apache.kafka + kafka_2.12 + ${kafka.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + net.bytebuddy + byte-buddy + ${byte-buddy.version} + + + org.mockito + mockito-core + ${mockito.version} + test + + + org.springframework.boot + spring-boot-starter-web + ${springBootVersion} + + + org.springframework.boot + spring-boot-starter-logging + + + + + + + org.springframework.boot + spring-boot-dependencies + ${springBootVersion} + pom + import + + + kr.motd.maven os-maven-plugin - 1.6.0 + 1.6.2 + org.apache.maven.plugins maven-compiler-plugin - 3.7.0 + 3.8.1 1.8 1.8 + + -Xlint:all + + + -Xdoclint:-syntax + + + org.apache.maven.plugins + maven-enforcer-plugin + 3.0.0-M2 + + + valid-build-environment + + enforce + + + + + [3.5,4.0) + + + [1.8,1.9) + + + + + + + consistent-dependency-versions + + enforce + + + + + + + + + no-snapshot-deps-at-release + + enforce + + + + + true + + + + + + org.apache.maven.plugins maven-surefire-plugin 2.22.1 - -Djdk.net.URLClassPath.disableClassPathURLCheck=true + -Xms2048m -Xmx2048m -Djdk.net.URLClassPath.disableClassPathURLCheck=true IntegrationTest + + org.springframework.boot + spring-boot-maven-plugin + + + true + + + + io.fabric8 + docker-maven-plugin + 0.20.1 + + + org.apache.maven.plugins + maven-javadoc-plugin + 3.1.0 + org.codehaus.mojo exec-maven-plugin @@ -105,8 +386,9 @@ org.xolstice.maven.plugins protobuf-maven-plugin - 0.5.1 + 0.6.1 + true com.google.protobuf:protoc:${protocVersion}:exe:${os.detected.classifier} diff --git a/protos/Makefile b/protos/Makefile index e49b8c32d6a..87e543be324 100644 --- a/protos/Makefile +++ b/protos/Makefile @@ -1,12 +1,16 @@ .PHONY: go -dirs = core serving specs storage types +dirs = core serving types storage service_dirs = core serving gen-go: - @$(foreach dir,$(dirs),protoc -I/usr/local/include -I. --go_out=plugins=grpc,paths=source_relative:generated/go feast/$(dir)/*.proto;) + @$(foreach dir,$(dirs),protoc -I/usr/local/include -I. --go_out=plugins=grpc,paths=source_relative:../sdk/go/protos/ feast/$(dir)/*.proto;) gen-python: pip install grpcio-tools - @$(foreach dir,$(dirs),python -m grpc_tools.protoc -I. --python_out=../sdk/python/ feast/$(dir)/*.proto;) - @$(foreach dir,$(service_dirs),python -m grpc_tools.protoc -I. 
--grpc_python_out=../sdk/python/ feast/$(dir)/*.proto;) \ No newline at end of file + pip install mypy-protobuf + @$(foreach dir,$(dirs),python -m grpc_tools.protoc -I. --python_out=../sdk/python/ --mypy_out=../sdk/python/ feast/$(dir)/*.proto;) + @$(foreach dir,$(service_dirs),python -m grpc_tools.protoc -I. --grpc_python_out=../sdk/python/ feast/$(dir)/*.proto;) + +gen-docs: + protoc --doc_out=../docs/api/ --doc_opt=../docs/assets/protoc-gen-doc-markdown.tmpl,proto.md feast/core/*.proto feast/serving/*.proto feast/storage/*.proto feast/types/*.proto; \ No newline at end of file diff --git a/protos/feast/core/CoreService.proto b/protos/feast/core/CoreService.proto index 3f4c16c2c76..2e0646e9730 100644 --- a/protos/feast/core/CoreService.proto +++ b/protos/feast/core/CoreService.proto @@ -1,146 +1,156 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +// +// Copyright 2018 The Feast Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// syntax = "proto3"; package feast.core; -import "feast/specs/EntitySpec.proto"; -import "feast/specs/FeatureSpec.proto"; -import "feast/specs/FeatureGroupSpec.proto"; -import "feast/specs/StorageSpec.proto"; -import "google/protobuf/empty.proto"; - -option java_package = "feast.core"; +option go_package = "github.com/gojek/feast/sdk/go/protos/feast/core"; option java_outer_classname = "CoreServiceProto"; -option go_package = "github.com/gojek/feast/protos/generated/go/feast/core"; +option java_package = "feast.core"; + +import "feast/core/FeatureSet.proto"; +import "feast/core/Store.proto"; service CoreService { - /* - Get entities specified in request. - This process returns a list of entity specs. - */ - rpc GetEntities(CoreServiceTypes.GetEntitiesRequest) returns (CoreServiceTypes.GetEntitiesResponse){}; - - /* - Get all entities - This process returns a list of entity specs. - */ - rpc ListEntities(google.protobuf.Empty) returns (CoreServiceTypes.ListEntitiesResponse) {}; - - /* - Get storage specs specified in request. - This process returns a list of storage specs. - */ - rpc GetStorage(CoreServiceTypes.GetStorageRequest) returns (CoreServiceTypes.GetStorageResponse){ - option deprecated = true; - }; - - /* - Get all storage specs. - This process returns a list of storage specs. - */ - rpc ListStorage(google.protobuf.Empty) returns (CoreServiceTypes.ListStorageResponse) { - option deprecated = true; - }; - - /* - Get features specified in request. 
- This process returns a list of feature specs. - */ - rpc GetFeatures(CoreServiceTypes.GetFeaturesRequest) returns (CoreServiceTypes.GetFeaturesResponse){}; - - /* - Get all features. - This process returns a list of entity specs. - */ - rpc ListFeatures(google.protobuf.Empty) returns (CoreServiceTypes.ListFeaturesResponse) {}; - - /* - Register a new feature to the metadata store, or update an existing feature. - If any validation errors occur, only the first encountered error will be returned. - */ - rpc ApplyFeature(feast.specs.FeatureSpec) returns (CoreServiceTypes.ApplyFeatureResponse) {}; - - /* - Register a new feature group to the metadata store, or update an existing feature group. - If any validation errors occur, only the first encountered error will be returned. - */ - rpc ApplyFeatureGroup(feast.specs.FeatureGroupSpec) returns (CoreServiceTypes.ApplyFeatureGroupResponse) {}; - - /* - Register a new entity to the metadata store, or update an existing entity. - If any validation errors occur, only the first encountered error will be returned. - */ - rpc ApplyEntity(feast.specs.EntitySpec) returns (CoreServiceTypes.ApplyEntityResponse) {}; + // Retrieve version information about this Feast deployment + rpc GetFeastCoreVersion (GetFeastCoreVersionRequest) returns (GetFeastCoreVersionResponse); + + // Returns a specific feature set + rpc GetFeatureSet (GetFeatureSetRequest) returns (GetFeatureSetResponse); + + // Retrieve feature set details given a filter. + // + // Returns all feature sets matching that filter. If none are found, + // an empty list will be returned. + // If no filter is provided in the request, the response will contain all the feature + // sets currently stored in the registry. + rpc ListFeatureSets (ListFeatureSetsRequest) returns (ListFeatureSetsResponse); + + // Retrieve store details given a filter. + // + // Returns all stores matching that filter. If none are found, an empty list will be returned. + // If no filter is provided in the request, the response will contain all the stores currently + // stored in the registry. + rpc ListStores (ListStoresRequest) returns (ListStoresResponse); + + // Create or update and existing feature set. + // + // This function is idempotent - it will not create a new feature set if schema does not change. + // If an existing feature set is updated, core will advance the version number, which will be + // returned in response. + rpc ApplyFeatureSet (ApplyFeatureSetRequest) returns (ApplyFeatureSetResponse); + + // Updates core with the configuration of the store. + // + // If the changes are valid, core will return the given store configuration in response, and + // start or update the necessary feature population jobs for the updated store. + rpc UpdateStore(UpdateStoreRequest) returns (UpdateStoreResponse); +} + +// Request for a single feature set +message GetFeatureSetRequest { + // Name of feature set (required). + string name = 1; + + // Version of feature set (optional). If omitted then latest feature set will be returned. + int32 version = 2; +} + +// Response containing a single feature set +message GetFeatureSetResponse { + feast.core.FeatureSetSpec feature_set = 1; +} +// Retrieves details for all versions of a specific feature set +message ListFeatureSetsRequest { + message Filter { + // Name of the desired feature set. Valid regex strings are allowed. + // e.g. 
+ // - .* can be used to match all feature sets + // - my-project-.* can be used to match all features prefixed by "my-project" + string feature_set_name = 1; + // Version of the desired feature set. Either a number or valid expression can be provided. + // e.g. + // - 1 will match version 1 exactly + // - >=1 will match all versions greater or equal to 1 + // - <10 will match all versions less than 10 + string feature_set_version = 2; + } + + Filter filter = 1; } -message CoreServiceTypes { - message GetEntitiesRequest { - repeated string ids = 1; - } - - message GetEntitiesResponse { - repeated feast.specs.EntitySpec entities = 1; - } - - message ListEntitiesResponse { - repeated feast.specs.EntitySpec entities = 1; - } - - // Feature retrieval - message GetFeaturesRequest { - repeated string ids = 1; - } - - message GetFeaturesResponse { - repeated feast.specs.FeatureSpec features = 1; - } - - message ListFeaturesResponse { - repeated feast.specs.FeatureSpec features = 1; - } - - // Storage spec retrieval - message GetStorageRequest { - repeated string ids = 1; - } - - message GetStorageResponse { - repeated feast.specs.StorageSpec storageSpecs = 1; - } - - message ListStorageResponse { - repeated feast.specs.StorageSpec storageSpecs = 1; - } - - // Entity registration response - message ApplyEntityResponse { - string entityName = 1; - } - - // Feature registration response - message ApplyFeatureResponse { - string featureId = 1; - } - - // Feature group registration response - message ApplyFeatureGroupResponse { - string featureGroupId = 1; - } +message ListFeatureSetsResponse { + repeated feast.core.FeatureSetSpec feature_sets = 1; +} + +message ListStoresRequest { + message Filter { + // Name of desired store. Regex is not supported in this query. + string name = 1; + } + + Filter filter = 1; +} +message ListStoresResponse { + repeated feast.core.Store store = 1; } + +message ApplyFeatureSetRequest { + // Feature set version and source will be ignored + feast.core.FeatureSetSpec feature_set = 1; +} + +message ApplyFeatureSetResponse { + enum Status { + // Latest feature set version is consistent with provided feature set + NO_CHANGE = 0; + + // New feature set or feature set version created + CREATED = 1; + + // Error occurred while trying to apply changes + ERROR = 2; + } + + // Feature set response has been enriched with version and source information + feast.core.FeatureSetSpec feature_set = 1; + Status status = 2; +} + +message GetFeastCoreVersionRequest {} + +message GetFeastCoreVersionResponse { + string version = 1; +} + +message UpdateStoreRequest { + feast.core.Store store = 1; +} + +message UpdateStoreResponse { + enum Status { + // Existing store config matching the given store id is identical to the given store config. + NO_CHANGE = 0; + + // New store created or existing config updated. + UPDATED = 1; + } + feast.core.Store store = 1; + Status status = 2; +} \ No newline at end of file diff --git a/protos/feast/core/DatasetService.proto b/protos/feast/core/DatasetService.proto deleted file mode 100644 index c094b41b374..00000000000 --- a/protos/feast/core/DatasetService.proto +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
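As an illustration of the ListFeatureSets filter semantics documented above, a minimal sketch of calling the new RPC from Java, assuming the conventional protoc-generated CoreServiceGrpc stub and an existing gRPC channel (both outside this diff):

// List every version >= 1 of all feature sets prefixed with "my-project-".
ListFeatureSetsRequest request = ListFeatureSetsRequest.newBuilder()
    .setFilter(ListFeatureSetsRequest.Filter.newBuilder()
        .setFeatureSetName("my-project-.*")
        .setFeatureSetVersion(">=1"))
    .build();
ListFeatureSetsResponse response =
    CoreServiceGrpc.newBlockingStub(channel).listFeatureSets(request);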
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -syntax = "proto3"; - -package feast.core; - -import "google/protobuf/timestamp.proto"; - -option java_package = "feast.core"; -option java_outer_classname = "DatasetServiceProto"; -option go_package = "github.com/gojek/feast/protos/generated/go/feast/core"; - -service DatasetService { - // Create training dataset for a feature set - rpc CreateDataset(DatasetServiceTypes.CreateDatasetRequest) returns (DatasetServiceTypes.CreateDatasetResponse){}; -} - -message DatasetServiceTypes { - message CreateDatasetRequest { - // set of features for which its training data should be created - FeatureSet featureSet = 1; - // start date of the training data (inclusive) - google.protobuf.Timestamp startDate = 2; - // end date of the training data (inclusive) - google.protobuf.Timestamp endDate = 3; - // (optional) number of row that should be generated - // (default: none) - int64 limit = 4; - // (optional) prefix for dataset name - string namePrefix = 5; - // (optional) additional WHERE clause, all filter entry will be combined with logic AND - map filters = 6; - } - - message CreateDatasetResponse { - // information of the created training dataset - DatasetInfo datasetInfo = 1; - } -} - -// Represent a collection of feature having same entity name -message FeatureSet { - // entity related to this feature set - string entityName = 1; - // list of feature id in this feature set - repeated string featureIds = 2; -} - -// Representation of training dataset information -message DatasetInfo { - // name of dataset - string name = 1; - // URL to table location of the training dataset - string tableUrl = 2; -} \ No newline at end of file diff --git a/protos/feast/core/FeatureSet.proto b/protos/feast/core/FeatureSet.proto new file mode 100644 index 00000000000..a80ae36f088 --- /dev/null +++ b/protos/feast/core/FeatureSet.proto @@ -0,0 +1,69 @@ +// +// * Copyright 2019 The Feast Authors +// * +// * Licensed under the Apache License, Version 2.0 (the "License"); +// * you may not use this file except in compliance with the License. +// * You may obtain a copy of the License at +// * +// * https://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. +// + +syntax = "proto3"; + +package feast.core; + +option java_package = "feast.core"; +option java_outer_classname = "FeatureSetProto"; +option go_package = "github.com/gojek/feast/sdk/go/protos/feast/core"; + +import "feast/types/Value.proto"; +import "feast/core/Source.proto"; +import "google/protobuf/duration.proto"; + +message FeatureSetSpec { + // Name of the featureSet. Must be unique. + string name = 1; + + // FeatureSet version. + int32 version = 2; + + // List of entities contained within this featureSet. + // This allows the feature to be used during joins between feature sets. 
+ // If the featureSet is ingested into a store that supports keys, this value + // will be made a key. + repeated EntitySpec entities = 3; + + // List of features contained within this featureSet. + repeated FeatureSpec features = 4; + + // Features in this feature set will only be retrieved if they are found + // after [time - max_age]. Missing or older feature values will be returned + // as nulls and indicated to end user + google.protobuf.Duration max_age = 5; + + // Optional. Source on which feature rows can be found. + // If not set, source will be set to the default value configured in Feast Core. + Source source = 6; +} + +message EntitySpec { + // Name of the entity. + string name = 1; + + // Value type of the feature. + feast.types.ValueType.Enum value_type = 2; +} + +message FeatureSpec { + // Name of the feature. + string name = 1; + + // Value type of the feature. + feast.types.ValueType.Enum value_type = 2; +} diff --git a/protos/feast/core/JobService.proto b/protos/feast/core/JobService.proto deleted file mode 100644 index 52a1cafec78..00000000000 --- a/protos/feast/core/JobService.proto +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -syntax = "proto3"; - -package feast.core; - -import "feast/specs/ImportSpec.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/timestamp.proto"; - -option java_package = "feast.core"; -option java_outer_classname = "JobServiceProto"; -option go_package = "github.com/gojek/feast/protos/generated/go/feast/core"; - -service JobService { - // Submit a job to feast to run. Returns the job id. - rpc SubmitJob (JobServiceTypes.SubmitImportJobRequest) returns (JobServiceTypes.SubmitImportJobResponse); - - // List all jobs submitted to feast. - rpc ListJobs (google.protobuf.Empty) returns (JobServiceTypes.ListJobsResponse); - - // Get Job with ID - rpc GetJob (JobServiceTypes.GetJobRequest) returns (JobServiceTypes.GetJobResponse); - - // Abort job with given ID - rpc AbortJob(JobServiceTypes.AbortJobRequest) returns (JobServiceTypes.AbortJobResponse); -} - -message JobServiceTypes { - message SubmitImportJobRequest { - feast.specs.ImportSpec importSpec = 1; - string name = 2; // optional - } - - message SubmitImportJobResponse { - string jobId = 1; - } - - message ListJobsResponse { - repeated JobDetail jobs = 1; - } - - message GetJobRequest { - string id = 1; - } - - message GetJobResponse { - JobDetail job = 1; - } - - message AbortJobRequest { - string id = 1; - } - - message AbortJobResponse { - string id = 1; - } - - // Expanded view of a given job. Returns job information, as well - // as latest metrics. 
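To make the FeatureSetSpec message added above concrete, a minimal sketch using the generated Java builders; the entity and feature names are invented for the example:

// A feature set with one entity, one feature, a 12-hour max age, and the default source.
FeatureSetSpec spec = FeatureSetSpec.newBuilder()
    .setName("driver")
    .setVersion(1)
    .addEntities(EntitySpec.newBuilder()
        .setName("driver_id")
        .setValueType(ValueType.Enum.INT64))
    .addFeatures(FeatureSpec.newBuilder()
        .setName("daily_trips")
        .setValueType(ValueType.Enum.INT32))
    .setMaxAge(Duration.newBuilder().setSeconds(12 * 60 * 60))
    .build();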
- message JobDetail { - string id = 1; - string extId = 2; - string type = 3; - string runner = 4; - string status = 5; - repeated string entities = 6; - repeated string features = 7; - map metrics = 8; - google.protobuf.Timestamp lastUpdated = 9; - google.protobuf.Timestamp created = 10; - } -} diff --git a/protos/feast/core/Source.proto b/protos/feast/core/Source.proto new file mode 100644 index 00000000000..8a6cbd415a2 --- /dev/null +++ b/protos/feast/core/Source.proto @@ -0,0 +1,47 @@ +// +// * Copyright 2019 The Feast Authors +// * +// * Licensed under the Apache License, Version 2.0 (the "License"); +// * you may not use this file except in compliance with the License. +// * You may obtain a copy of the License at +// * +// * https://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. +// + +syntax = "proto3"; +package feast.core; + +option java_package = "feast.core"; +option java_outer_classname = "SourceProto"; +option go_package = "github.com/gojek/feast/sdk/go/protos/feast/core"; + + +message Source { + + // The kind of data source Feast should connect to in order to retrieve FeatureRow value + SourceType type = 1; + + // Source specific configuration + oneof source_config { + KafkaSourceConfig kafka_source_config = 2; + } +} + +enum SourceType { + INVALID = 0; + KAFKA = 1; +} + +message KafkaSourceConfig { + // - bootstrapServers: [comma delimited value of host[:port]] + string bootstrap_servers = 1; + + // - topics: [Kafka topic name. This value is provisioned by core and should not be set by the user.] + string topic = 2; +} \ No newline at end of file diff --git a/protos/feast/core/Store.proto b/protos/feast/core/Store.proto new file mode 100644 index 00000000000..e1b8c581a38 --- /dev/null +++ b/protos/feast/core/Store.proto @@ -0,0 +1,154 @@ +// +// * Copyright 2019 The Feast Authors +// * +// * Licensed under the Apache License, Version 2.0 (the "License"); +// * you may not use this file except in compliance with the License. +// * You may obtain a copy of the License at +// * +// * https://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. +// + +syntax = "proto3"; +package feast.core; + +option java_package = "feast.core"; +option java_outer_classname = "StoreProto"; +option go_package = "github.com/gojek/feast/sdk/go/protos/feast/core"; + +// Store provides a location where Feast reads and writes feature values. +// Feature values will be written to the Store in the form of FeatureRow elements. +// The way FeatureRow is encoded and decoded when it is written to and read from +// the Store depends on the type of the Store. +// +// For example, a FeatureRow will materialize as a row in a table in +// BigQuery but it will materialize as a key, value pair element in Redis. +// +message Store { + + enum StoreType { + INVALID = 0; + + // Redis stores a FeatureRow element as a key, value pair. 
+ // + // The Redis data types used (https://redis.io/topics/data-types): + // - key: STRING + // - value: STRING + // + // Encodings: + // - key: byte array of RedisKey (refer to feast.storage.RedisKey) + // - value: byte array of FeatureRow (refer to feast.types.FeatureRow) + // + REDIS = 1; + + // BigQuery stores a FeatureRow element as a row in a BigQuery table. + // + // Table name is derived from the feature set name and version as: + // [feature_set_name]_v[feature_set_version] + // + // For example: + // A feature row for feature set "driver" and version "1" will be written + // to table "driver_v1". + // + // The entities and features in a FeatureSetSpec corresponds to the + // fields in the BigQuery table (these make up the BigQuery schema). + // The name of the entity spec and feature spec corresponds to the column + // names, and the value_type of entity spec and feature spec corresponds + // to BigQuery standard SQL data type of the column. + // + // The following BigQuery fields are reserved for Feast internal use. + // Ingestion of entity or feature spec with names identical + // to the following field names will raise an exception during ingestion. + // + // column_name | column_data_type | description + // ====================|==================|================================ + // - event_timestamp | TIMESTAMP | event time of the FeatureRow + // - created_timestamp | TIMESTAMP | processing time of the ingestion of the FeatureRow + // - job_id | STRING | identifier for the job that writes the FeatureRow to the corresponding BigQuery table + // + // BigQuery table created will be partitioned by the field "event_timestamp" + // of the FeatureRow (https://cloud.google.com/bigquery/docs/partitioned-tables). + // + // Since newer version of feature set can introduce breaking, non backward- + // compatible BigQuery schema updates, incrementing the version of a + // feature set will result in the creation of a new empty BigQuery table + // with the new schema. + // + // The following table shows how ValueType in Feast is mapped to + // BigQuery Standard SQL data types + // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types): + // + // BYTES : BYTES + // STRING : STRING + // INT32 : INT64 + // INT64 : IN64 + // DOUBLE : FLOAT64 + // FLOAT : FLOAT64 + // BOOL : BOOL + // BYTES_LIST : ARRAY + // STRING_LIST : ARRAY + // INT32_LIST : ARRAY + // INT64_LIST : ARRAY + // DOUBLE_LIST : ARRAY + // FLOAT_LIST : ARRAY + // BOOL_LIST : ARRAY + // + // The column mode in BigQuery is set to "Nullable" such that unset Value + // in a FeatureRow corresponds to NULL value in BigQuery. + // + BIGQUERY = 2; + + // Unsupported in Feast 0.3 + CASSANDRA = 3; + } + + message RedisConfig { + string host = 1; + int32 port = 2; + } + + message BigQueryConfig { + string project_id = 1; + string dataset_id = 2; + } + + message CassandraConfig { + string host = 1; + int32 port = 2; + } + + message Subscription { + // Name of featureSet to subscribe to. This field supports any valid basic POSIX regex, + // e.g. customer_.* or .* + // https://www.regular-expressions.info/posix.html + string name = 1; + + // Versions of the given featureSet that will be ingested into this store. + // Valid options for version: + // latest: only subscribe to latest version of feature set + // [version number]: pin to a specific version + // >[version number]: subscribe to all versions larger than or equal to [version number] + string version = 2; + } + + // Name of the store. 
+ string name = 1; + + // Type of store. + StoreType type = 2; + + // Feature sets to subscribe to. + repeated Subscription subscriptions = 4; + + // Configuration to connect to the store. Required. + oneof config { + RedisConfig redis_config = 11; + BigQueryConfig bigquery_config = 12; + CassandraConfig cassandra_config = 13; + } +} diff --git a/protos/feast/core/UIService.proto b/protos/feast/core/UIService.proto deleted file mode 100644 index 76b79d8f707..00000000000 --- a/protos/feast/core/UIService.proto +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -syntax = "proto3"; - -package feast.core; - -import "feast/specs/EntitySpec.proto"; -import "feast/specs/FeatureSpec.proto"; -import "feast/specs/FeatureGroupSpec.proto"; -import "feast/specs/StorageSpec.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/timestamp.proto"; - -option java_package = "feast.core"; -option java_outer_classname = "UIServiceProto"; -option go_package = "github.com/gojek/feast/protos/generated/go/feast/core"; - -service UIService { - /* - Get entity specified in request. - This process returns a single of entity specs. - */ - rpc GetEntity(UIServiceTypes.GetEntityRequest) returns (UIServiceTypes.GetEntityResponse) {}; - - /* - Get all entities. - This process returns a list of entity specs. - */ - rpc ListEntities(google.protobuf.Empty) returns (UIServiceTypes.ListEntitiesResponse) {}; - - /* - Get feature specified in request. - */ - rpc GetFeature(UIServiceTypes.GetFeatureRequest) returns (UIServiceTypes.GetFeatureResponse){}; - - /* - Get all features. - This process returns a list of feature specs. - */ - rpc ListFeatures(google.protobuf.Empty) returns (UIServiceTypes.ListFeaturesResponse){}; - - /* - Get feature group specified in request. - */ - rpc GetFeatureGroup(UIServiceTypes.GetFeatureGroupRequest) returns (UIServiceTypes.GetFeatureGroupResponse){}; - - /* - Get all feature groups. - This process returns a list of feature group specs. - */ - rpc ListFeatureGroups(google.protobuf.Empty) returns (UIServiceTypes.ListFeatureGroupsResponse){}; - - /* - Get storage spec specified in request. - - */ - rpc GetStorage(UIServiceTypes.GetStorageRequest) returns (UIServiceTypes.GetStorageResponse){}; - - /* - Get all storage specs. - This process returns a list of storage specs. 
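For reference against the Store message defined above, a minimal sketch of a Redis-backed store subscribed to the latest version of every feature set, built with the generated Java builders; the name, host, and port values are illustrative:

// An online store backed by Redis; the subscription uses the documented regex/version syntax.
Store store = Store.newBuilder()
    .setName("online")
    .setType(Store.StoreType.REDIS)
    .addSubscriptions(Store.Subscription.newBuilder()
        .setName(".*")
        .setVersion("latest"))
    .setRedisConfig(Store.RedisConfig.newBuilder()
        .setHost("localhost")
        .setPort(6379))
    .build();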
- */ - rpc ListStorage(google.protobuf.Empty) returns (UIServiceTypes.ListStorageResponse){}; -} - -message UIServiceTypes { - // Expanded entity spec - message EntityDetail { - feast.specs.EntitySpec spec = 1; - repeated string jobs = 2; - google.protobuf.Timestamp lastUpdated = 3; - } - - message GetEntityRequest { - string id = 1; - } - - message GetEntityResponse { - EntityDetail entity = 1; - } - - message ListEntitiesResponse { - repeated EntityDetail entities = 1; - } - - // Expanded feature spec - message FeatureDetail { - feast.specs.FeatureSpec spec = 1; - string bigqueryView = 2; - bool enabled = 3; - repeated string jobs = 4; - google.protobuf.Timestamp lastUpdated = 5; - google.protobuf.Timestamp created = 6; - } - - message GetFeatureRequest { - string id = 1; - } - - message GetFeatureResponse { - FeatureDetail feature = 1; - feast.specs.FeatureSpec rawSpec = 2; - } - - message ListFeaturesResponse { - repeated FeatureDetail features = 1; - } - - // Expanded feature group spec - message FeatureGroupDetail { - feast.specs.FeatureGroupSpec spec = 1; - google.protobuf.Timestamp lastUpdated = 2; - } - - message GetFeatureGroupRequest { - string id = 1; - } - - message GetFeatureGroupResponse { - FeatureGroupDetail featureGroup = 1; - } - - message ListFeatureGroupsResponse { - repeated FeatureGroupDetail featureGroups = 1; - } - - // Expanded storage spec - message StorageDetail { - feast.specs.StorageSpec spec = 1; - google.protobuf.Timestamp lastUpdated = 2; - } - - message GetStorageRequest { - string id = 1; - } - - message GetStorageResponse { - StorageDetail storage = 1; - } - - message ListStorageResponse { - repeated StorageDetail storage = 1; - } - -} diff --git a/protos/feast/serving/Serving.proto b/protos/feast/serving/Serving.proto deleted file mode 100644 index 07ec06ea181..00000000000 --- a/protos/feast/serving/Serving.proto +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -syntax = "proto3"; - -package feast.serving; - -import "google/protobuf/timestamp.proto"; -import "feast/types/Value.proto"; - -option java_package = "feast.serving"; -option java_outer_classname = "ServingAPIProto"; -option go_package = "github.com/gojek/feast/protos/generated/go/feast/serving"; - -service ServingAPI { - // Query features from Feast serving storage - rpc QueryFeatures (QueryFeaturesRequest) returns (QueryFeaturesResponse) {}; -} - -message QueryFeaturesRequest { - // e.g. "driver", "customer", "city". - string entityName = 1; - // List of entity ID. - repeated string entityId = 2; - // List of feature ID. - // feature ID is in the form of [entity_name].[feature_name] - // e.g: "driver.day.total_accepted_booking" - // all requested feature ID shall have same entity name. - repeated string featureId = 3; -} - -message QueryFeaturesResponse { - // Entity name of the response - string entityName = 1; - // map of entity ID and its entity's properties. 
-    map<string, Entity> entities = 2;
-}
-
-message Entity {
-    // map of feature ID and its feature value.
-    map<string, FeatureValue> features = 1;
-}
-
-message FeatureValue {
-    // value of feature
-    feast.types.Value value = 1;
-    // timestamp of the feature
-    google.protobuf.Timestamp timestamp = 2;
-}
diff --git a/protos/feast/serving/ServingService.proto b/protos/feast/serving/ServingService.proto
new file mode 100644
index 00000000000..fe896253a94
--- /dev/null
+++ b/protos/feast/serving/ServingService.proto
@@ -0,0 +1,196 @@
+/*
+ * Copyright 2018 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+syntax = "proto3";
+
+package feast.serving;
+
+import "google/protobuf/timestamp.proto";
+import "google/protobuf/duration.proto";
+import "feast/types/Value.proto";
+
+option java_package = "feast.serving";
+option java_outer_classname = "ServingAPIProto";
+option go_package = "github.com/gojek/feast/sdk/go/protos/feast/serving";
+
+service ServingService {
+    // Get information about this Feast serving.
+    rpc GetFeastServingInfo (GetFeastServingInfoRequest) returns (GetFeastServingInfoResponse);
+
+    // Get online features synchronously.
+    rpc GetOnlineFeatures (GetOnlineFeaturesRequest) returns (GetOnlineFeaturesResponse);
+
+    // Get batch features asynchronously.
+    //
+    // The client should check the status of the returned job periodically by
+    // calling GetJob to determine if the job has completed successfully
+    // or with an error. If the job completes successfully, i.e.
+    // status = JOB_STATUS_DONE with no error, then the client can check
+    // the file_uris for the location to download feature values data.
+    // The client is assumed to have access to these file URIs.
+    rpc GetBatchFeatures (GetBatchFeaturesRequest) returns (GetBatchFeaturesResponse);
+
+    // Get the latest job status for batch feature retrieval.
+    rpc GetJob (GetJobRequest) returns (GetJobResponse);
+}
+
+message GetFeastServingInfoRequest {}
+
+message GetFeastServingInfoResponse {
+    // Feast version of this serving deployment.
+    string version = 1;
+
+    // Type of serving deployment, either ONLINE or BATCH. Different store types support different
+    // feature retrieval methods.
+    FeastServingType type = 2;
+
+    // Note: Batch specific options start from 10.
+    // Staging location for this serving store, if any.
+    string job_staging_location = 10;
+}
+
+message FeatureSetRequest {
+    // Feature set name
+    string name = 1;
+
+    // Feature set version
+    int32 version = 2;
+
+    // Features that should be retrieved from this feature set
+    repeated string feature_names = 3;
+
+    // The features will be retrieved if:
+    // entity_timestamp - max_age <= event_timestamp <= entity_timestamp
+    //
+    // If unspecified, the default max_age specified in the FeatureSetSpec will
+    // be used.
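+    //
+    // For example (illustrative): with max_age = 600s and an entity_timestamp of
+    // 2019-01-01T12:00:00Z, only feature rows whose event_timestamp lies between
+    // 2019-01-01T11:50:00Z and 2019-01-01T12:00:00Z will be considered.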
+    google.protobuf.Duration max_age = 4;
+}
+
+message GetOnlineFeaturesRequest {
+    // List of feature sets and their features that are being retrieved
+    repeated FeatureSetRequest feature_sets = 1;
+
+    // List of entity rows, containing entity id and timestamp data.
+    // Used during retrieval of feature rows and for joining feature
+    // rows into a final dataset.
+    repeated EntityRow entity_rows = 2;
+
+    // Option to omit entities from the response. If true, only feature
+    // values will be returned.
+    bool omit_entities_in_response = 3;
+
+    message EntityRow {
+        // Request timestamp of this row. This value will be used, together with max_age,
+        // to determine feature staleness.
+        google.protobuf.Timestamp entity_timestamp = 1;
+
+        // Map of entity name to entity value.
+        map<string, feast.types.Value> fields = 2;
+    }
+}
+
+message GetBatchFeaturesRequest {
+    // List of feature sets and their features that are being retrieved.
+    repeated FeatureSetRequest feature_sets = 1;
+
+    // Source of the entity dataset containing the timestamps and entity keys to retrieve
+    // features for.
+    DatasetSource dataset_source = 2;
+}
+
+message GetOnlineFeaturesResponse {
+    // Feature values retrieved from Feast.
+    repeated FieldValues field_values = 1;
+
+    message FieldValues {
+        // Map of feature or entity name to feature/entity values.
+        // Timestamps are not returned in this response.
+        map<string, feast.types.Value> fields = 1;
+    }
+}
+
+message GetBatchFeaturesResponse {
+    Job job = 1;
+}
+
+message GetJobRequest {
+    Job job = 1;
+}
+
+message GetJobResponse {
+    Job job = 1;
+}
+
+enum FeastServingType {
+    FEAST_SERVING_TYPE_INVALID = 0;
+    // Online serving receives entity data directly and synchronously and will
+    // respond immediately.
+    FEAST_SERVING_TYPE_ONLINE = 1;
+    // Batch serving receives entity data asynchronously and orchestrates the
+    // retrieval through a staging location.
+    FEAST_SERVING_TYPE_BATCH = 2;
+}
+
+enum JobType {
+    JOB_TYPE_INVALID = 0;
+    JOB_TYPE_DOWNLOAD = 1;
+}
+
+enum JobStatus {
+    JOB_STATUS_INVALID = 0;
+    JOB_STATUS_PENDING = 1;
+    JOB_STATUS_RUNNING = 2;
+    JOB_STATUS_DONE = 3;
+}
+
+enum DataFormat {
+    DATA_FORMAT_INVALID = 0;
+    DATA_FORMAT_AVRO = 1;
+}
+
+message Job {
+    string id = 1;
+    // Output only. The type of the job.
+    JobType type = 2;
+    // Output only. Current state of the job.
+    JobStatus status = 3;
+    // Output only. If not empty, the job has failed with this error message.
+    string error = 4;
+    // Output only. The list of URIs for the files to be downloaded or
+    // uploaded (depends on the job type) for this particular job.
+    repeated string file_uris = 5;
+    // Output only. The data format for all the files.
+    // For CSV format, the files contain both feature values and a column header.
+    DataFormat data_format = 6;
+}
+
+message DatasetSource {
+    oneof dataset_source {
+        // File source to load the dataset from.
+        FileSource file_source = 1;
+    }
+
+    message FileSource {
+        // URIs to retrieve the dataset from, e.g. gs://bucket/directory/object.csv. Wildcards are
+        // supported. This data must be in a format that can be uploaded to the serving store, and
+        // must be accessible by this serving instance.
+        repeated string file_uris = 1;
+
+        // Format of the data. Currently only Avro is supported.
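+        // For example, DATA_FORMAT_AVRO, as defined in the DataFormat enum above.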
+ DataFormat data_format = 2; + } +} diff --git a/protos/feast/specs/EntitySpec.proto b/protos/feast/specs/EntitySpec.proto deleted file mode 100644 index 94642160dba..00000000000 --- a/protos/feast/specs/EntitySpec.proto +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -syntax = "proto3"; - -package feast.specs; - -option java_package = "feast.specs"; -option java_outer_classname = "EntitySpecProto"; -option go_package = "github.com/gojek/feast/protos/generated/go/feast/specs"; - -message EntitySpec { - string name = 1; - string description = 2; - repeated string tags = 3; -} diff --git a/protos/feast/specs/FeatureGroupSpec.proto b/protos/feast/specs/FeatureGroupSpec.proto deleted file mode 100644 index 048083d3da8..00000000000 --- a/protos/feast/specs/FeatureGroupSpec.proto +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -syntax = "proto3"; - -import "feast/specs/FeatureSpec.proto"; - -package feast.specs; - -option java_package = "feast.specs"; -option java_outer_classname = "FeatureGroupSpecProto"; -option go_package = "github.com/gojek/feast/protos/generated/go/feast/specs"; - -message FeatureGroupSpec { - string id = 1; - repeated string tags = 2; - map options = 3; -} diff --git a/protos/feast/specs/FeatureSpec.proto b/protos/feast/specs/FeatureSpec.proto deleted file mode 100644 index e355a9c373d..00000000000 --- a/protos/feast/specs/FeatureSpec.proto +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -syntax = "proto3"; - -import "feast/specs/EntitySpec.proto"; -import "feast/specs/StorageSpec.proto"; -import "feast/types/Value.proto"; - -package feast.specs; - -option java_package = "feast.specs"; -option java_outer_classname = "FeatureSpecProto"; -option go_package = "github.com/gojek/feast/protos/generated/go/feast/specs"; - -message FeatureSpec { - string id = 1; - string name = 2; - string owner = 3; - string description = 4; - string uri = 5; - feast.types.ValueType.Enum valueType = 7; - string entity = 8; - string group = 9; - repeated string tags = 10; - map options = 11; -} diff --git a/protos/feast/specs/ImportJobSpecs.proto b/protos/feast/specs/ImportJobSpecs.proto deleted file mode 100644 index 5267e05e886..00000000000 --- a/protos/feast/specs/ImportJobSpecs.proto +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -syntax = "proto3"; - -package feast.specs; - -import "feast/specs/ImportSpec.proto"; -import "feast/specs/EntitySpec.proto"; -import "feast/specs/FeatureSpec.proto"; -import "feast/specs/StorageSpec.proto"; - -option java_package = "feast.specs"; -option java_outer_classname = "ImportJobSpecsProto"; -option go_package = "github.com/gojek/feast/protos/generated/go/feast/specs"; - -message ImportJobSpecs { - string jobId = 1; - feast.specs.ImportSpec importSpec = 2; - repeated feast.specs.EntitySpec entitySpecs = 3; - repeated feast.specs.FeatureSpec featureSpecs = 4; - StorageSpec servingStorageSpec = 5; - StorageSpec warehouseStorageSpec = 6; - StorageSpec errorsStorageSpec = 7; -} diff --git a/protos/feast/specs/ImportSpec.proto b/protos/feast/specs/ImportSpec.proto deleted file mode 100644 index a6cab4f71d7..00000000000 --- a/protos/feast/specs/ImportSpec.proto +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -syntax = "proto3"; - -package feast.specs; - -option java_package = "feast.specs"; -option java_outer_classname = "ImportSpecProto"; -option go_package = "github.com/gojek/feast/protos/generated/go/feast/specs"; - -import "google/protobuf/timestamp.proto"; - -message ImportSpec { - string type = 1; - - map sourceOptions = 2; - - map jobOptions = 5; - - repeated string entities = 3; - - Schema schema = 4; -} - -message Schema { - repeated Field fields = 1; - // the event timestamp to set per row. 
- oneof timestamp { - string timestampColumn = 5; - google.protobuf.Timestamp timestampValue = 6; - } - string entityIdColumn = 7; -} - -message Field { - string name = 1; - string featureId = 2; // set this if this is a feature -} diff --git a/protos/feast/specs/StorageSpec.proto b/protos/feast/specs/StorageSpec.proto deleted file mode 100644 index f0d70d1a223..00000000000 --- a/protos/feast/specs/StorageSpec.proto +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -syntax = "proto3"; - -package feast.specs; - -option java_package = "feast.specs"; -option java_outer_classname = "StorageSpecProto"; -option go_package = "github.com/gojek/feast/protos/generated/go/feast/specs"; - -message StorageSpec { - // unique identifier for this instance - string id = 1; - - // type should define what sort of store it is - // e.g. redis, bigquery, etc - string type = 2; - - // options contain (but are not restricted to) options like - // connection information. - map options = 3; -} diff --git a/protos/feast/storage/BigTable.proto b/protos/feast/storage/BigTable.proto deleted file mode 100644 index 61cbed798b3..00000000000 --- a/protos/feast/storage/BigTable.proto +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -syntax = "proto3"; - -package feast.storage; - -option java_outer_classname = "BigTableProto"; -option java_package = "feast.storage"; -option go_package = "github.com/gojek/feast/protos/generated/go/feast/storage"; - -message BigTableRowKey { - // This should be the first 7 characters of a sha1 of the entityKey proto encoded - string sha1Prefix = 1; - string entityKey = 2; - string reversedMillis = 3; -} diff --git a/protos/feast/storage/Redis.proto b/protos/feast/storage/Redis.proto index 37691f97617..ae287f4e6bf 100644 --- a/protos/feast/storage/Redis.proto +++ b/protos/feast/storage/Redis.proto @@ -1,5 +1,5 @@ /* - * Copyright 2018 The Feast Authors + * Copyright 2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,53 +15,23 @@ */ syntax = "proto3"; -import "google/protobuf/timestamp.proto"; -import "feast/types/Value.proto"; +import "feast/types/Field.proto"; package feast.storage; option java_outer_classname = "RedisProto"; option java_package = "feast.storage"; -option go_package = "github.com/gojek/feast/protos/generated/go/feast/storage"; +option go_package = "github.com/gojek/feast/sdk/go/protos/feast/storage"; -message RedisBucketKey { +message RedisKey { // Field number 1 is reserved for a future distributing hash if needed // (for when redis is clustered). - // Entity key from the FeatureRow - string entityKey = 2; + // FeatureSet this row belongs to, this is defined as featureSetName:version. + string feature_set = 2; - /** - * This should be the first 7 characters of a sha1 of the featureId - * This is just to save storage space as it's kept in memory. - */ - string featureIdSha1Prefix = 3; - - /** - * This groups a feature's values (for different eventTimestamps), - * into buckets so many can be retrieved together. - * - * See FeatureRowToRedisMutationDoFn. - * bucketId = roundedToGranularity(eventTimestamp).seconds / bucketSize.seconds - */ - fixed64 bucketId = 4; -} - -/** - * Because in redis features are stored as a key per feature not per - * feature row, we need the event timestamp in the value. - */ -message RedisBucketValue { - feast.types.Value value = 1; - google.protobuf.Timestamp eventTimestamp = 2; -} - - -/** - * This allows us to group multiple bucket values together in a - * single list to make it easier to keep sets together - */ -message RedisBucketValueList { - repeated RedisBucketValue values = 1; + // List of fields containing entity names and their respective values + // contained within this feature row. + repeated feast.types.Field entities = 3; } diff --git a/protos/feast/types/FeatureRow.proto b/protos/feast/types/FeatureRow.proto index 755fe0f582e..24293c6faa6 100644 --- a/protos/feast/types/FeatureRow.proto +++ b/protos/feast/types/FeatureRow.proto @@ -17,17 +17,26 @@ syntax = "proto3"; import "google/protobuf/timestamp.proto"; -import "feast/types/Feature.proto"; +import "feast/types/Field.proto"; package feast.types; option java_package = "feast.types"; option java_outer_classname = "FeatureRowProto"; -option go_package = "github.com/gojek/feast/protos/generated/go/feast/types"; +option go_package = "github.com/gojek/feast/sdk/go/protos/feast/types"; message FeatureRow { - string entityKey = 1; - repeated Feature features = 2; - google.protobuf.Timestamp eventTimestamp = 3; - string entityName = 4; -} + + // Fields in the feature row. + repeated Field fields = 2; + + // Timestamp of the feature row. While the actual definition of this timestamp may vary + // depending on the upstream feature creation pipelines, this is the timestamp that Feast + // will use to perform joins, determine latest values, and coalesce rows. + google.protobuf.Timestamp event_timestamp = 3; + + // Complete reference to the featureSet this featureRow belongs to, in the form of + // featureSetName:version. This value will be used by the feast ingestion job to filter + // rows, and write the values to the correct tables. 
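+    // For example (illustrative): "driver:1" refers to version 1 of the "driver" feature set.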
+ string feature_set = 6; +} \ No newline at end of file diff --git a/protos/feast/types/FeatureRowExtended.proto b/protos/feast/types/FeatureRowExtended.proto index 1326a7e7ff1..e88fbb73232 100644 --- a/protos/feast/types/FeatureRowExtended.proto +++ b/protos/feast/types/FeatureRowExtended.proto @@ -23,13 +23,13 @@ package feast.types; option java_package = "feast.types"; option java_outer_classname = "FeatureRowExtendedProto"; -option go_package = "github.com/gojek/feast/protos/generated/go/feast/types"; +option go_package = "github.com/gojek/feast/sdk/go/protos/feast/types"; message Error { string cause = 1; // exception class name string transform = 2; // name of transform where the error occurred string message = 3; - string stackTrace = 4; + string stack_trace = 4; } message Attempt { @@ -39,6 +39,6 @@ message Attempt { message FeatureRowExtended { FeatureRow row = 1; - Attempt lastAttempt = 2; - google.protobuf.Timestamp firstSeen = 3; + Attempt last_attempt = 2; + google.protobuf.Timestamp first_seen = 3; } diff --git a/protos/feast/types/Feature.proto b/protos/feast/types/Field.proto similarity index 82% rename from protos/feast/types/Feature.proto rename to protos/feast/types/Field.proto index a52bc8a995d..3929b16a322 100644 --- a/protos/feast/types/Feature.proto +++ b/protos/feast/types/Field.proto @@ -21,10 +21,10 @@ import "feast/types/Value.proto"; package feast.types; option java_package = "feast.types"; -option java_outer_classname = "FeatureProto"; -option go_package = "github.com/gojek/feast/protos/generated/go/feast/types"; +option java_outer_classname = "FieldProto"; +option go_package = "github.com/gojek/feast/sdk/go/protos/feast/types"; -message Feature { - string id = 1; +message Field { + string name = 1; feast.types.Value value = 2; } diff --git a/protos/feast/types/Value.proto b/protos/feast/types/Value.proto index e7c1f404ef3..065497f30a3 100644 --- a/protos/feast/types/Value.proto +++ b/protos/feast/types/Value.proto @@ -16,17 +16,15 @@ syntax = "proto3"; -import "google/protobuf/timestamp.proto"; - package feast.types; option java_package = "feast.types"; option java_outer_classname = "ValueProto"; -option go_package = "github.com/gojek/feast/protos/generated/go/feast/types"; +option go_package = "github.com/gojek/feast/sdk/go/protos/feast/types"; message ValueType { enum Enum { - UNKNOWN = 0; + INVALID = 0; BYTES = 1; STRING = 2; INT32 = 3; @@ -34,36 +32,34 @@ message ValueType { DOUBLE = 5; FLOAT = 6; BOOL = 7; - TIMESTAMP = 8; + BYTES_LIST = 11; + STRING_LIST = 12; + INT32_LIST = 13; + INT64_LIST = 14; + DOUBLE_LIST = 15; + FLOAT_LIST = 16; + BOOL_LIST = 17; } } message Value { // ValueType is referenced by the metadata types, FeatureInfo and EntityInfo. // The enum values do not have to match the oneof val field ids, but they should. 
- oneof val { - bytes bytesVal = 1; - string stringVal = 2; - int32 int32Val = 3; - int64 int64Val = 4; - double doubleVal = 5; - float floatVal = 6; - bool boolVal = 7; - google.protobuf.Timestamp timestampVal = 8; - } -} - -message ValueList { - oneof valueList { - BytesList bytesList = 1; - StringList stringList = 2; - Int32List int32List = 3; - Int64List int64List = 4; - DoubleList doubleList = 5; - FloatList floatList = 6; - BoolList boolList = 7; - TimestampList timestampList = 8; + bytes bytes_val = 1; + string string_val = 2; + int32 int32_val = 3; + int64 int64_val = 4; + double double_val = 5; + float float_val = 6; + bool bool_val = 7; + BytesList bytes_list_val = 11; + StringList string_list_val = 12; + Int32List int32_list_val = 13; + Int64List int64_list_val = 14; + DoubleList double_list_val = 15; + FloatList float_list_val = 16; + BoolList bool_list_val = 17; } } @@ -94,7 +90,3 @@ message FloatList { message BoolList { repeated bool val = 1; } - -message TimestampList { - repeated google.protobuf.Timestamp val = 1; -} diff --git a/protos/generated/go/feast/core/CoreService.pb.go b/protos/generated/go/feast/core/CoreService.pb.go deleted file mode 100644 index d4e837b94d7..00000000000 --- a/protos/generated/go/feast/core/CoreService.pb.go +++ /dev/null @@ -1,974 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: feast/core/CoreService.proto - -package core // import "github.com/gojek/feast/protos/generated/go/feast/core" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import specs "github.com/gojek/feast/protos/generated/go/feast/specs" -import empty "github.com/golang/protobuf/ptypes/empty" - -import ( - context "golang.org/x/net/context" - grpc "google.golang.org/grpc" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type CoreServiceTypes struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CoreServiceTypes) Reset() { *m = CoreServiceTypes{} } -func (m *CoreServiceTypes) String() string { return proto.CompactTextString(m) } -func (*CoreServiceTypes) ProtoMessage() {} -func (*CoreServiceTypes) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_334455c996aa931b, []int{0} -} -func (m *CoreServiceTypes) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CoreServiceTypes.Unmarshal(m, b) -} -func (m *CoreServiceTypes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CoreServiceTypes.Marshal(b, m, deterministic) -} -func (dst *CoreServiceTypes) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes.Merge(dst, src) -} -func (m *CoreServiceTypes) XXX_Size() int { - return xxx_messageInfo_CoreServiceTypes.Size(m) -} -func (m *CoreServiceTypes) XXX_DiscardUnknown() { - xxx_messageInfo_CoreServiceTypes.DiscardUnknown(m) -} - -var xxx_messageInfo_CoreServiceTypes proto.InternalMessageInfo - -type CoreServiceTypes_GetEntitiesRequest struct { - Ids []string `protobuf:"bytes,1,rep,name=ids,proto3" json:"ids,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CoreServiceTypes_GetEntitiesRequest) Reset() { *m = CoreServiceTypes_GetEntitiesRequest{} } -func (m *CoreServiceTypes_GetEntitiesRequest) String() string { return proto.CompactTextString(m) } -func (*CoreServiceTypes_GetEntitiesRequest) ProtoMessage() {} -func (*CoreServiceTypes_GetEntitiesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_334455c996aa931b, []int{0, 0} -} -func (m *CoreServiceTypes_GetEntitiesRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CoreServiceTypes_GetEntitiesRequest.Unmarshal(m, b) -} -func (m *CoreServiceTypes_GetEntitiesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CoreServiceTypes_GetEntitiesRequest.Marshal(b, m, deterministic) -} -func (dst *CoreServiceTypes_GetEntitiesRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_GetEntitiesRequest.Merge(dst, src) -} -func (m *CoreServiceTypes_GetEntitiesRequest) XXX_Size() int { - return xxx_messageInfo_CoreServiceTypes_GetEntitiesRequest.Size(m) -} -func (m *CoreServiceTypes_GetEntitiesRequest) XXX_DiscardUnknown() { - xxx_messageInfo_CoreServiceTypes_GetEntitiesRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_CoreServiceTypes_GetEntitiesRequest proto.InternalMessageInfo - -func (m *CoreServiceTypes_GetEntitiesRequest) GetIds() []string { - if m != nil { - return m.Ids - } - return nil -} - -type CoreServiceTypes_GetEntitiesResponse struct { - Entities []*specs.EntitySpec `protobuf:"bytes,1,rep,name=entities,proto3" json:"entities,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CoreServiceTypes_GetEntitiesResponse) Reset() { *m = CoreServiceTypes_GetEntitiesResponse{} } -func (m *CoreServiceTypes_GetEntitiesResponse) String() string { return proto.CompactTextString(m) } -func (*CoreServiceTypes_GetEntitiesResponse) ProtoMessage() {} -func (*CoreServiceTypes_GetEntitiesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_334455c996aa931b, []int{0, 1} 
-} -func (m *CoreServiceTypes_GetEntitiesResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CoreServiceTypes_GetEntitiesResponse.Unmarshal(m, b) -} -func (m *CoreServiceTypes_GetEntitiesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CoreServiceTypes_GetEntitiesResponse.Marshal(b, m, deterministic) -} -func (dst *CoreServiceTypes_GetEntitiesResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_GetEntitiesResponse.Merge(dst, src) -} -func (m *CoreServiceTypes_GetEntitiesResponse) XXX_Size() int { - return xxx_messageInfo_CoreServiceTypes_GetEntitiesResponse.Size(m) -} -func (m *CoreServiceTypes_GetEntitiesResponse) XXX_DiscardUnknown() { - xxx_messageInfo_CoreServiceTypes_GetEntitiesResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_CoreServiceTypes_GetEntitiesResponse proto.InternalMessageInfo - -func (m *CoreServiceTypes_GetEntitiesResponse) GetEntities() []*specs.EntitySpec { - if m != nil { - return m.Entities - } - return nil -} - -type CoreServiceTypes_ListEntitiesResponse struct { - Entities []*specs.EntitySpec `protobuf:"bytes,1,rep,name=entities,proto3" json:"entities,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CoreServiceTypes_ListEntitiesResponse) Reset() { *m = CoreServiceTypes_ListEntitiesResponse{} } -func (m *CoreServiceTypes_ListEntitiesResponse) String() string { return proto.CompactTextString(m) } -func (*CoreServiceTypes_ListEntitiesResponse) ProtoMessage() {} -func (*CoreServiceTypes_ListEntitiesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_334455c996aa931b, []int{0, 2} -} -func (m *CoreServiceTypes_ListEntitiesResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CoreServiceTypes_ListEntitiesResponse.Unmarshal(m, b) -} -func (m *CoreServiceTypes_ListEntitiesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CoreServiceTypes_ListEntitiesResponse.Marshal(b, m, deterministic) -} -func (dst *CoreServiceTypes_ListEntitiesResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_ListEntitiesResponse.Merge(dst, src) -} -func (m *CoreServiceTypes_ListEntitiesResponse) XXX_Size() int { - return xxx_messageInfo_CoreServiceTypes_ListEntitiesResponse.Size(m) -} -func (m *CoreServiceTypes_ListEntitiesResponse) XXX_DiscardUnknown() { - xxx_messageInfo_CoreServiceTypes_ListEntitiesResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_CoreServiceTypes_ListEntitiesResponse proto.InternalMessageInfo - -func (m *CoreServiceTypes_ListEntitiesResponse) GetEntities() []*specs.EntitySpec { - if m != nil { - return m.Entities - } - return nil -} - -// Feature retrieval -type CoreServiceTypes_GetFeaturesRequest struct { - Ids []string `protobuf:"bytes,1,rep,name=ids,proto3" json:"ids,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CoreServiceTypes_GetFeaturesRequest) Reset() { *m = CoreServiceTypes_GetFeaturesRequest{} } -func (m *CoreServiceTypes_GetFeaturesRequest) String() string { return proto.CompactTextString(m) } -func (*CoreServiceTypes_GetFeaturesRequest) ProtoMessage() {} -func (*CoreServiceTypes_GetFeaturesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_334455c996aa931b, []int{0, 3} -} -func (m *CoreServiceTypes_GetFeaturesRequest) XXX_Unmarshal(b []byte) error { - return 
xxx_messageInfo_CoreServiceTypes_GetFeaturesRequest.Unmarshal(m, b) -} -func (m *CoreServiceTypes_GetFeaturesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CoreServiceTypes_GetFeaturesRequest.Marshal(b, m, deterministic) -} -func (dst *CoreServiceTypes_GetFeaturesRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_GetFeaturesRequest.Merge(dst, src) -} -func (m *CoreServiceTypes_GetFeaturesRequest) XXX_Size() int { - return xxx_messageInfo_CoreServiceTypes_GetFeaturesRequest.Size(m) -} -func (m *CoreServiceTypes_GetFeaturesRequest) XXX_DiscardUnknown() { - xxx_messageInfo_CoreServiceTypes_GetFeaturesRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_CoreServiceTypes_GetFeaturesRequest proto.InternalMessageInfo - -func (m *CoreServiceTypes_GetFeaturesRequest) GetIds() []string { - if m != nil { - return m.Ids - } - return nil -} - -type CoreServiceTypes_GetFeaturesResponse struct { - Features []*specs.FeatureSpec `protobuf:"bytes,1,rep,name=features,proto3" json:"features,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CoreServiceTypes_GetFeaturesResponse) Reset() { *m = CoreServiceTypes_GetFeaturesResponse{} } -func (m *CoreServiceTypes_GetFeaturesResponse) String() string { return proto.CompactTextString(m) } -func (*CoreServiceTypes_GetFeaturesResponse) ProtoMessage() {} -func (*CoreServiceTypes_GetFeaturesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_334455c996aa931b, []int{0, 4} -} -func (m *CoreServiceTypes_GetFeaturesResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CoreServiceTypes_GetFeaturesResponse.Unmarshal(m, b) -} -func (m *CoreServiceTypes_GetFeaturesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CoreServiceTypes_GetFeaturesResponse.Marshal(b, m, deterministic) -} -func (dst *CoreServiceTypes_GetFeaturesResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_GetFeaturesResponse.Merge(dst, src) -} -func (m *CoreServiceTypes_GetFeaturesResponse) XXX_Size() int { - return xxx_messageInfo_CoreServiceTypes_GetFeaturesResponse.Size(m) -} -func (m *CoreServiceTypes_GetFeaturesResponse) XXX_DiscardUnknown() { - xxx_messageInfo_CoreServiceTypes_GetFeaturesResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_CoreServiceTypes_GetFeaturesResponse proto.InternalMessageInfo - -func (m *CoreServiceTypes_GetFeaturesResponse) GetFeatures() []*specs.FeatureSpec { - if m != nil { - return m.Features - } - return nil -} - -type CoreServiceTypes_ListFeaturesResponse struct { - Features []*specs.FeatureSpec `protobuf:"bytes,1,rep,name=features,proto3" json:"features,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CoreServiceTypes_ListFeaturesResponse) Reset() { *m = CoreServiceTypes_ListFeaturesResponse{} } -func (m *CoreServiceTypes_ListFeaturesResponse) String() string { return proto.CompactTextString(m) } -func (*CoreServiceTypes_ListFeaturesResponse) ProtoMessage() {} -func (*CoreServiceTypes_ListFeaturesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_334455c996aa931b, []int{0, 5} -} -func (m *CoreServiceTypes_ListFeaturesResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CoreServiceTypes_ListFeaturesResponse.Unmarshal(m, b) -} -func (m *CoreServiceTypes_ListFeaturesResponse) 
XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CoreServiceTypes_ListFeaturesResponse.Marshal(b, m, deterministic) -} -func (dst *CoreServiceTypes_ListFeaturesResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_ListFeaturesResponse.Merge(dst, src) -} -func (m *CoreServiceTypes_ListFeaturesResponse) XXX_Size() int { - return xxx_messageInfo_CoreServiceTypes_ListFeaturesResponse.Size(m) -} -func (m *CoreServiceTypes_ListFeaturesResponse) XXX_DiscardUnknown() { - xxx_messageInfo_CoreServiceTypes_ListFeaturesResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_CoreServiceTypes_ListFeaturesResponse proto.InternalMessageInfo - -func (m *CoreServiceTypes_ListFeaturesResponse) GetFeatures() []*specs.FeatureSpec { - if m != nil { - return m.Features - } - return nil -} - -// Storage spec retrieval -type CoreServiceTypes_GetStorageRequest struct { - Ids []string `protobuf:"bytes,1,rep,name=ids,proto3" json:"ids,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CoreServiceTypes_GetStorageRequest) Reset() { *m = CoreServiceTypes_GetStorageRequest{} } -func (m *CoreServiceTypes_GetStorageRequest) String() string { return proto.CompactTextString(m) } -func (*CoreServiceTypes_GetStorageRequest) ProtoMessage() {} -func (*CoreServiceTypes_GetStorageRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_334455c996aa931b, []int{0, 6} -} -func (m *CoreServiceTypes_GetStorageRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CoreServiceTypes_GetStorageRequest.Unmarshal(m, b) -} -func (m *CoreServiceTypes_GetStorageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CoreServiceTypes_GetStorageRequest.Marshal(b, m, deterministic) -} -func (dst *CoreServiceTypes_GetStorageRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_GetStorageRequest.Merge(dst, src) -} -func (m *CoreServiceTypes_GetStorageRequest) XXX_Size() int { - return xxx_messageInfo_CoreServiceTypes_GetStorageRequest.Size(m) -} -func (m *CoreServiceTypes_GetStorageRequest) XXX_DiscardUnknown() { - xxx_messageInfo_CoreServiceTypes_GetStorageRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_CoreServiceTypes_GetStorageRequest proto.InternalMessageInfo - -func (m *CoreServiceTypes_GetStorageRequest) GetIds() []string { - if m != nil { - return m.Ids - } - return nil -} - -type CoreServiceTypes_GetStorageResponse struct { - StorageSpecs []*specs.StorageSpec `protobuf:"bytes,1,rep,name=storageSpecs,proto3" json:"storageSpecs,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CoreServiceTypes_GetStorageResponse) Reset() { *m = CoreServiceTypes_GetStorageResponse{} } -func (m *CoreServiceTypes_GetStorageResponse) String() string { return proto.CompactTextString(m) } -func (*CoreServiceTypes_GetStorageResponse) ProtoMessage() {} -func (*CoreServiceTypes_GetStorageResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_334455c996aa931b, []int{0, 7} -} -func (m *CoreServiceTypes_GetStorageResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CoreServiceTypes_GetStorageResponse.Unmarshal(m, b) -} -func (m *CoreServiceTypes_GetStorageResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CoreServiceTypes_GetStorageResponse.Marshal(b, m, deterministic) -} 
-func (dst *CoreServiceTypes_GetStorageResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_GetStorageResponse.Merge(dst, src) -} -func (m *CoreServiceTypes_GetStorageResponse) XXX_Size() int { - return xxx_messageInfo_CoreServiceTypes_GetStorageResponse.Size(m) -} -func (m *CoreServiceTypes_GetStorageResponse) XXX_DiscardUnknown() { - xxx_messageInfo_CoreServiceTypes_GetStorageResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_CoreServiceTypes_GetStorageResponse proto.InternalMessageInfo - -func (m *CoreServiceTypes_GetStorageResponse) GetStorageSpecs() []*specs.StorageSpec { - if m != nil { - return m.StorageSpecs - } - return nil -} - -type CoreServiceTypes_ListStorageResponse struct { - StorageSpecs []*specs.StorageSpec `protobuf:"bytes,1,rep,name=storageSpecs,proto3" json:"storageSpecs,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CoreServiceTypes_ListStorageResponse) Reset() { *m = CoreServiceTypes_ListStorageResponse{} } -func (m *CoreServiceTypes_ListStorageResponse) String() string { return proto.CompactTextString(m) } -func (*CoreServiceTypes_ListStorageResponse) ProtoMessage() {} -func (*CoreServiceTypes_ListStorageResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_334455c996aa931b, []int{0, 8} -} -func (m *CoreServiceTypes_ListStorageResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CoreServiceTypes_ListStorageResponse.Unmarshal(m, b) -} -func (m *CoreServiceTypes_ListStorageResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CoreServiceTypes_ListStorageResponse.Marshal(b, m, deterministic) -} -func (dst *CoreServiceTypes_ListStorageResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_ListStorageResponse.Merge(dst, src) -} -func (m *CoreServiceTypes_ListStorageResponse) XXX_Size() int { - return xxx_messageInfo_CoreServiceTypes_ListStorageResponse.Size(m) -} -func (m *CoreServiceTypes_ListStorageResponse) XXX_DiscardUnknown() { - xxx_messageInfo_CoreServiceTypes_ListStorageResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_CoreServiceTypes_ListStorageResponse proto.InternalMessageInfo - -func (m *CoreServiceTypes_ListStorageResponse) GetStorageSpecs() []*specs.StorageSpec { - if m != nil { - return m.StorageSpecs - } - return nil -} - -// Entity registration response -type CoreServiceTypes_ApplyEntityResponse struct { - EntityName string `protobuf:"bytes,1,opt,name=entityName,proto3" json:"entityName,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CoreServiceTypes_ApplyEntityResponse) Reset() { *m = CoreServiceTypes_ApplyEntityResponse{} } -func (m *CoreServiceTypes_ApplyEntityResponse) String() string { return proto.CompactTextString(m) } -func (*CoreServiceTypes_ApplyEntityResponse) ProtoMessage() {} -func (*CoreServiceTypes_ApplyEntityResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_334455c996aa931b, []int{0, 9} -} -func (m *CoreServiceTypes_ApplyEntityResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CoreServiceTypes_ApplyEntityResponse.Unmarshal(m, b) -} -func (m *CoreServiceTypes_ApplyEntityResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CoreServiceTypes_ApplyEntityResponse.Marshal(b, m, deterministic) -} -func (dst *CoreServiceTypes_ApplyEntityResponse) 
XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_ApplyEntityResponse.Merge(dst, src) -} -func (m *CoreServiceTypes_ApplyEntityResponse) XXX_Size() int { - return xxx_messageInfo_CoreServiceTypes_ApplyEntityResponse.Size(m) -} -func (m *CoreServiceTypes_ApplyEntityResponse) XXX_DiscardUnknown() { - xxx_messageInfo_CoreServiceTypes_ApplyEntityResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_CoreServiceTypes_ApplyEntityResponse proto.InternalMessageInfo - -func (m *CoreServiceTypes_ApplyEntityResponse) GetEntityName() string { - if m != nil { - return m.EntityName - } - return "" -} - -// Feature registration response -type CoreServiceTypes_ApplyFeatureResponse struct { - FeatureId string `protobuf:"bytes,1,opt,name=featureId,proto3" json:"featureId,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CoreServiceTypes_ApplyFeatureResponse) Reset() { *m = CoreServiceTypes_ApplyFeatureResponse{} } -func (m *CoreServiceTypes_ApplyFeatureResponse) String() string { return proto.CompactTextString(m) } -func (*CoreServiceTypes_ApplyFeatureResponse) ProtoMessage() {} -func (*CoreServiceTypes_ApplyFeatureResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_334455c996aa931b, []int{0, 10} -} -func (m *CoreServiceTypes_ApplyFeatureResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CoreServiceTypes_ApplyFeatureResponse.Unmarshal(m, b) -} -func (m *CoreServiceTypes_ApplyFeatureResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CoreServiceTypes_ApplyFeatureResponse.Marshal(b, m, deterministic) -} -func (dst *CoreServiceTypes_ApplyFeatureResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_ApplyFeatureResponse.Merge(dst, src) -} -func (m *CoreServiceTypes_ApplyFeatureResponse) XXX_Size() int { - return xxx_messageInfo_CoreServiceTypes_ApplyFeatureResponse.Size(m) -} -func (m *CoreServiceTypes_ApplyFeatureResponse) XXX_DiscardUnknown() { - xxx_messageInfo_CoreServiceTypes_ApplyFeatureResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_CoreServiceTypes_ApplyFeatureResponse proto.InternalMessageInfo - -func (m *CoreServiceTypes_ApplyFeatureResponse) GetFeatureId() string { - if m != nil { - return m.FeatureId - } - return "" -} - -// Feature group registration response -type CoreServiceTypes_ApplyFeatureGroupResponse struct { - FeatureGroupId string `protobuf:"bytes,1,opt,name=featureGroupId,proto3" json:"featureGroupId,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CoreServiceTypes_ApplyFeatureGroupResponse) Reset() { - *m = CoreServiceTypes_ApplyFeatureGroupResponse{} -} -func (m *CoreServiceTypes_ApplyFeatureGroupResponse) String() string { - return proto.CompactTextString(m) -} -func (*CoreServiceTypes_ApplyFeatureGroupResponse) ProtoMessage() {} -func (*CoreServiceTypes_ApplyFeatureGroupResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_334455c996aa931b, []int{0, 11} -} -func (m *CoreServiceTypes_ApplyFeatureGroupResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CoreServiceTypes_ApplyFeatureGroupResponse.Unmarshal(m, b) -} -func (m *CoreServiceTypes_ApplyFeatureGroupResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CoreServiceTypes_ApplyFeatureGroupResponse.Marshal(b, m, deterministic) -} -func (dst 
*CoreServiceTypes_ApplyFeatureGroupResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_ApplyFeatureGroupResponse.Merge(dst, src) -} -func (m *CoreServiceTypes_ApplyFeatureGroupResponse) XXX_Size() int { - return xxx_messageInfo_CoreServiceTypes_ApplyFeatureGroupResponse.Size(m) -} -func (m *CoreServiceTypes_ApplyFeatureGroupResponse) XXX_DiscardUnknown() { - xxx_messageInfo_CoreServiceTypes_ApplyFeatureGroupResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_CoreServiceTypes_ApplyFeatureGroupResponse proto.InternalMessageInfo - -func (m *CoreServiceTypes_ApplyFeatureGroupResponse) GetFeatureGroupId() string { - if m != nil { - return m.FeatureGroupId - } - return "" -} - -func init() { - proto.RegisterType((*CoreServiceTypes)(nil), "feast.core.CoreServiceTypes") - proto.RegisterType((*CoreServiceTypes_GetEntitiesRequest)(nil), "feast.core.CoreServiceTypes.GetEntitiesRequest") - proto.RegisterType((*CoreServiceTypes_GetEntitiesResponse)(nil), "feast.core.CoreServiceTypes.GetEntitiesResponse") - proto.RegisterType((*CoreServiceTypes_ListEntitiesResponse)(nil), "feast.core.CoreServiceTypes.ListEntitiesResponse") - proto.RegisterType((*CoreServiceTypes_GetFeaturesRequest)(nil), "feast.core.CoreServiceTypes.GetFeaturesRequest") - proto.RegisterType((*CoreServiceTypes_GetFeaturesResponse)(nil), "feast.core.CoreServiceTypes.GetFeaturesResponse") - proto.RegisterType((*CoreServiceTypes_ListFeaturesResponse)(nil), "feast.core.CoreServiceTypes.ListFeaturesResponse") - proto.RegisterType((*CoreServiceTypes_GetStorageRequest)(nil), "feast.core.CoreServiceTypes.GetStorageRequest") - proto.RegisterType((*CoreServiceTypes_GetStorageResponse)(nil), "feast.core.CoreServiceTypes.GetStorageResponse") - proto.RegisterType((*CoreServiceTypes_ListStorageResponse)(nil), "feast.core.CoreServiceTypes.ListStorageResponse") - proto.RegisterType((*CoreServiceTypes_ApplyEntityResponse)(nil), "feast.core.CoreServiceTypes.ApplyEntityResponse") - proto.RegisterType((*CoreServiceTypes_ApplyFeatureResponse)(nil), "feast.core.CoreServiceTypes.ApplyFeatureResponse") - proto.RegisterType((*CoreServiceTypes_ApplyFeatureGroupResponse)(nil), "feast.core.CoreServiceTypes.ApplyFeatureGroupResponse") -} - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion4 - -// CoreServiceClient is the client API for CoreService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type CoreServiceClient interface { - // - // Get entities specified in request. - // This process returns a list of entity specs. - GetEntities(ctx context.Context, in *CoreServiceTypes_GetEntitiesRequest, opts ...grpc.CallOption) (*CoreServiceTypes_GetEntitiesResponse, error) - // - // Get all entities - // This process returns a list of entity specs. - ListEntities(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*CoreServiceTypes_ListEntitiesResponse, error) - // - // Get storage specs specified in request. - // This process returns a list of storage specs. - GetStorage(ctx context.Context, in *CoreServiceTypes_GetStorageRequest, opts ...grpc.CallOption) (*CoreServiceTypes_GetStorageResponse, error) - // - // Get all storage specs. 
- // This process returns a list of storage specs. - ListStorage(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*CoreServiceTypes_ListStorageResponse, error) - // - // Get features specified in request. - // This process returns a list of feature specs. - GetFeatures(ctx context.Context, in *CoreServiceTypes_GetFeaturesRequest, opts ...grpc.CallOption) (*CoreServiceTypes_GetFeaturesResponse, error) - // - // Get all features. - // This process returns a list of entity specs. - ListFeatures(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*CoreServiceTypes_ListFeaturesResponse, error) - // - // Register a new feature to the metadata store, or update an existing feature. - // If any validation errors occur, only the first encountered error will be returned. - ApplyFeature(ctx context.Context, in *specs.FeatureSpec, opts ...grpc.CallOption) (*CoreServiceTypes_ApplyFeatureResponse, error) - // - // Register a new feature group to the metadata store, or update an existing feature group. - // If any validation errors occur, only the first encountered error will be returned. - ApplyFeatureGroup(ctx context.Context, in *specs.FeatureGroupSpec, opts ...grpc.CallOption) (*CoreServiceTypes_ApplyFeatureGroupResponse, error) - // - // Register a new entity to the metadata store, or update an existing entity. - // If any validation errors occur, only the first encountered error will be returned. - ApplyEntity(ctx context.Context, in *specs.EntitySpec, opts ...grpc.CallOption) (*CoreServiceTypes_ApplyEntityResponse, error) -} - -type coreServiceClient struct { - cc *grpc.ClientConn -} - -func NewCoreServiceClient(cc *grpc.ClientConn) CoreServiceClient { - return &coreServiceClient{cc} -} - -func (c *coreServiceClient) GetEntities(ctx context.Context, in *CoreServiceTypes_GetEntitiesRequest, opts ...grpc.CallOption) (*CoreServiceTypes_GetEntitiesResponse, error) { - out := new(CoreServiceTypes_GetEntitiesResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/GetEntities", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *coreServiceClient) ListEntities(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*CoreServiceTypes_ListEntitiesResponse, error) { - out := new(CoreServiceTypes_ListEntitiesResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/ListEntities", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// Deprecated: Do not use. -func (c *coreServiceClient) GetStorage(ctx context.Context, in *CoreServiceTypes_GetStorageRequest, opts ...grpc.CallOption) (*CoreServiceTypes_GetStorageResponse, error) { - out := new(CoreServiceTypes_GetStorageResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/GetStorage", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// Deprecated: Do not use. -func (c *coreServiceClient) ListStorage(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*CoreServiceTypes_ListStorageResponse, error) { - out := new(CoreServiceTypes_ListStorageResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/ListStorage", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *coreServiceClient) GetFeatures(ctx context.Context, in *CoreServiceTypes_GetFeaturesRequest, opts ...grpc.CallOption) (*CoreServiceTypes_GetFeaturesResponse, error) { - out := new(CoreServiceTypes_GetFeaturesResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/GetFeatures", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *coreServiceClient) ListFeatures(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*CoreServiceTypes_ListFeaturesResponse, error) { - out := new(CoreServiceTypes_ListFeaturesResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/ListFeatures", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *coreServiceClient) ApplyFeature(ctx context.Context, in *specs.FeatureSpec, opts ...grpc.CallOption) (*CoreServiceTypes_ApplyFeatureResponse, error) { - out := new(CoreServiceTypes_ApplyFeatureResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/ApplyFeature", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *coreServiceClient) ApplyFeatureGroup(ctx context.Context, in *specs.FeatureGroupSpec, opts ...grpc.CallOption) (*CoreServiceTypes_ApplyFeatureGroupResponse, error) { - out := new(CoreServiceTypes_ApplyFeatureGroupResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/ApplyFeatureGroup", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *coreServiceClient) ApplyEntity(ctx context.Context, in *specs.EntitySpec, opts ...grpc.CallOption) (*CoreServiceTypes_ApplyEntityResponse, error) { - out := new(CoreServiceTypes_ApplyEntityResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/ApplyEntity", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// CoreServiceServer is the server API for CoreService service. -type CoreServiceServer interface { - // - // Get entities specified in request. - // This process returns a list of entity specs. - GetEntities(context.Context, *CoreServiceTypes_GetEntitiesRequest) (*CoreServiceTypes_GetEntitiesResponse, error) - // - // Get all entities - // This process returns a list of entity specs. - ListEntities(context.Context, *empty.Empty) (*CoreServiceTypes_ListEntitiesResponse, error) - // - // Get storage specs specified in request. - // This process returns a list of storage specs. - GetStorage(context.Context, *CoreServiceTypes_GetStorageRequest) (*CoreServiceTypes_GetStorageResponse, error) - // - // Get all storage specs. - // This process returns a list of storage specs. - ListStorage(context.Context, *empty.Empty) (*CoreServiceTypes_ListStorageResponse, error) - // - // Get features specified in request. - // This process returns a list of feature specs. - GetFeatures(context.Context, *CoreServiceTypes_GetFeaturesRequest) (*CoreServiceTypes_GetFeaturesResponse, error) - // - // Get all features. - // This process returns a list of entity specs. - ListFeatures(context.Context, *empty.Empty) (*CoreServiceTypes_ListFeaturesResponse, error) - // - // Register a new feature to the metadata store, or update an existing feature. - // If any validation errors occur, only the first encountered error will be returned. - ApplyFeature(context.Context, *specs.FeatureSpec) (*CoreServiceTypes_ApplyFeatureResponse, error) - // - // Register a new feature group to the metadata store, or update an existing feature group. - // If any validation errors occur, only the first encountered error will be returned. - ApplyFeatureGroup(context.Context, *specs.FeatureGroupSpec) (*CoreServiceTypes_ApplyFeatureGroupResponse, error) - // - // Register a new entity to the metadata store, or update an existing entity. - // If any validation errors occur, only the first encountered error will be returned. 
- ApplyEntity(context.Context, *specs.EntitySpec) (*CoreServiceTypes_ApplyEntityResponse, error) -} - -func RegisterCoreServiceServer(s *grpc.Server, srv CoreServiceServer) { - s.RegisterService(&_CoreService_serviceDesc, srv) -} - -func _CoreService_GetEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CoreServiceTypes_GetEntitiesRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).GetEntities(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/GetEntities", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).GetEntities(ctx, req.(*CoreServiceTypes_GetEntitiesRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _CoreService_ListEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(empty.Empty) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).ListEntities(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/ListEntities", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).ListEntities(ctx, req.(*empty.Empty)) - } - return interceptor(ctx, in, info, handler) -} - -func _CoreService_GetStorage_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CoreServiceTypes_GetStorageRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).GetStorage(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/GetStorage", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).GetStorage(ctx, req.(*CoreServiceTypes_GetStorageRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _CoreService_ListStorage_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(empty.Empty) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).ListStorage(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/ListStorage", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).ListStorage(ctx, req.(*empty.Empty)) - } - return interceptor(ctx, in, info, handler) -} - -func _CoreService_GetFeatures_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CoreServiceTypes_GetFeaturesRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).GetFeatures(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/GetFeatures", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).GetFeatures(ctx, req.(*CoreServiceTypes_GetFeaturesRequest)) - } - return 
interceptor(ctx, in, info, handler) -} - -func _CoreService_ListFeatures_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(empty.Empty) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).ListFeatures(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/ListFeatures", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).ListFeatures(ctx, req.(*empty.Empty)) - } - return interceptor(ctx, in, info, handler) -} - -func _CoreService_ApplyFeature_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(specs.FeatureSpec) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).ApplyFeature(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/ApplyFeature", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).ApplyFeature(ctx, req.(*specs.FeatureSpec)) - } - return interceptor(ctx, in, info, handler) -} - -func _CoreService_ApplyFeatureGroup_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(specs.FeatureGroupSpec) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).ApplyFeatureGroup(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/ApplyFeatureGroup", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).ApplyFeatureGroup(ctx, req.(*specs.FeatureGroupSpec)) - } - return interceptor(ctx, in, info, handler) -} - -func _CoreService_ApplyEntity_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(specs.EntitySpec) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).ApplyEntity(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/ApplyEntity", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).ApplyEntity(ctx, req.(*specs.EntitySpec)) - } - return interceptor(ctx, in, info, handler) -} - -var _CoreService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "feast.core.CoreService", - HandlerType: (*CoreServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "GetEntities", - Handler: _CoreService_GetEntities_Handler, - }, - { - MethodName: "ListEntities", - Handler: _CoreService_ListEntities_Handler, - }, - { - MethodName: "GetStorage", - Handler: _CoreService_GetStorage_Handler, - }, - { - MethodName: "ListStorage", - Handler: _CoreService_ListStorage_Handler, - }, - { - MethodName: "GetFeatures", - Handler: _CoreService_GetFeatures_Handler, - }, - { - MethodName: "ListFeatures", - Handler: _CoreService_ListFeatures_Handler, - }, - { - MethodName: "ApplyFeature", - Handler: _CoreService_ApplyFeature_Handler, - }, - { - MethodName: "ApplyFeatureGroup", - Handler: _CoreService_ApplyFeatureGroup_Handler, - }, - { - MethodName: 
"ApplyEntity", - Handler: _CoreService_ApplyEntity_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "feast/core/CoreService.proto", -} - -func init() { - proto.RegisterFile("feast/core/CoreService.proto", fileDescriptor_CoreService_334455c996aa931b) -} - -var fileDescriptor_CoreService_334455c996aa931b = []byte{ - // 589 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0xdb, 0x6e, 0x13, 0x3d, - 0x10, 0x4e, 0xfe, 0x48, 0xfd, 0x9b, 0x49, 0x84, 0x5a, 0xa7, 0x82, 0x60, 0x5a, 0x54, 0xad, 0x44, - 0xd5, 0x2b, 0xbb, 0xf4, 0xc0, 0x15, 0x37, 0xb4, 0x2a, 0x11, 0xb4, 0x42, 0x68, 0x03, 0x37, 0x05, - 0x2e, 0x72, 0x98, 0x2c, 0x0b, 0x49, 0xbc, 0xd8, 0x0e, 0x52, 0xde, 0x80, 0x47, 0xe0, 0x19, 0x78, - 0x4a, 0x14, 0xdb, 0xdd, 0x75, 0xd2, 0x4d, 0x37, 0x42, 0xb9, 0xdb, 0x9d, 0xc3, 0x37, 0x1e, 0x7f, - 0xf3, 0x79, 0x60, 0x77, 0x80, 0x1d, 0xa5, 0x79, 0x4f, 0x48, 0xe4, 0x17, 0x42, 0x62, 0x1b, 0xe5, - 0xcf, 0xb8, 0x87, 0x2c, 0x91, 0x42, 0x0b, 0x02, 0xc6, 0xcb, 0x66, 0x5e, 0xea, 0x22, 0x55, 0x82, - 0x3d, 0xc5, 0x2f, 0xc7, 0x3a, 0xd6, 0xd3, 0x76, 0x82, 0x3d, 0x1b, 0x49, 0xf7, 0x7c, 0xef, 0x6b, - 0xec, 0xe8, 0x89, 0x44, 0xcf, 0x1d, 0xe4, 0xb8, 0x5b, 0x52, 0x4c, 0x92, 0x65, 0x10, 0x6d, 0x2d, - 0x64, 0x27, 0xf2, 0x21, 0x9e, 0x44, 0x42, 0x44, 0x43, 0xe4, 0xe6, 0xaf, 0x3b, 0x19, 0x70, 0x1c, - 0x25, 0x7a, 0x6a, 0x9d, 0xc1, 0xef, 0x0d, 0xd8, 0xf2, 0x8e, 0xff, 0x61, 0x9a, 0xa0, 0xa2, 0x07, - 0x40, 0x5a, 0xa8, 0xcd, 0x51, 0x63, 0x54, 0x21, 0xfe, 0x98, 0xa0, 0xd2, 0x64, 0x0b, 0x2a, 0x71, - 0x5f, 0x35, 0xcb, 0xfb, 0x95, 0xc3, 0x6a, 0x38, 0xfb, 0xa4, 0x6f, 0xa1, 0x31, 0x17, 0xa7, 0x12, - 0x31, 0x56, 0x48, 0x4e, 0x60, 0x13, 0x9d, 0xcd, 0x44, 0xd7, 0x8e, 0x1f, 0x31, 0x7b, 0x1f, 0xe6, - 0x88, 0x2c, 0xbb, 0x83, 0x30, 0x0d, 0xa4, 0x57, 0xb0, 0x73, 0x1d, 0xab, 0x35, 0x81, 0xd9, 0x06, - 0xdc, 0x75, 0xdd, 0xd3, 0xc0, 0x95, 0x69, 0x20, 0x8b, 0x73, 0x35, 0x4f, 0x61, 0x73, 0xe0, 0x6c, - 0xae, 0x66, 0x73, 0xae, 0xa6, 0x47, 0x53, 0x98, 0x46, 0xd2, 0x6b, 0xdb, 0xc1, 0x9a, 0xd0, 0x9e, - 0xc1, 0x76, 0x0b, 0xb5, 0x63, 0x73, 0x79, 0x07, 0xa1, 0xe9, 0x34, 0x0d, 0x73, 0x25, 0x5f, 0x42, - 0x5d, 0x65, 0x73, 0x90, 0x5f, 0xd6, 0x1b, 0x94, 0x70, 0x2e, 0x9a, 0xb6, 0xa1, 0x31, 0x6b, 0x64, - 0xbd, 0xa0, 0x67, 0xd0, 0x78, 0x95, 0x24, 0xc3, 0xa9, 0xe5, 0x2b, 0x05, 0x7d, 0x0a, 0x60, 0x58, - 0x9b, 0xbe, 0xeb, 0x8c, 0xb0, 0x59, 0xde, 0x2f, 0x1f, 0x56, 0x43, 0xcf, 0x42, 0x4f, 0x61, 0xc7, - 0xa4, 0xb9, 0x4b, 0x4a, 0xf3, 0x76, 0xa1, 0xea, 0xae, 0xea, 0x4d, 0xdf, 0xa5, 0x65, 0x06, 0x7a, - 0x01, 0x8f, 0xfd, 0x2c, 0x23, 0x98, 0x34, 0xf5, 0x00, 0x1e, 0x0c, 0x3c, 0x7b, 0x9a, 0xbf, 0x60, - 0x3d, 0xfe, 0xf3, 0x3f, 0xd4, 0x3c, 0x69, 0x10, 0x09, 0x35, 0x6f, 0xda, 0x09, 0x67, 0x99, 0xc6, - 0xd9, 0xa2, 0x84, 0xd8, 0x5d, 0xfd, 0xd0, 0xa3, 0xd5, 0x13, 0xec, 0x49, 0x83, 0x12, 0xf9, 0x04, - 0x75, 0x5f, 0x15, 0xe4, 0x21, 0xb3, 0x62, 0x66, 0xb7, 0x62, 0x66, 0x97, 0x33, 0x31, 0xd3, 0xe7, - 0xf7, 0x62, 0xe7, 0x09, 0x2b, 0x28, 0x11, 0x09, 0x90, 0xcd, 0x0e, 0x61, 0x45, 0xc7, 0x9b, 0x9f, - 0x45, 0xca, 0x57, 0x8e, 0x77, 0x05, 0x2b, 0xbf, 0xfe, 0x2b, 0x93, 0xcf, 0x50, 0xf3, 0x66, 0x6b, - 0x69, 0x3f, 0x47, 0x85, 0xfd, 0xe4, 0xa2, 0x5b, 0x8a, 0x6e, 0x15, 0x58, 0x4c, 0xd1, 0xc2, 0x0b, - 0x51, 0x4c, 0xd1, 0xa2, 0xb8, 0x33, 0x8a, 0xd2, 0xa2, 0xff, 0x4e, 0x51, 0x0e, 0xf8, 0x17, 0xa8, - 0xfb, 0x83, 0x4c, 0x96, 0xbe, 0x1c, 0x05, 0xf0, 0x79, 0x1a, 0x0a, 0x4a, 0x64, 0x08, 0xdb, 0x77, - 0x74, 0x42, 0xf6, 0xf2, 0x6a, 0xa4, 0x3b, 0x87, 0xbe, 0x58, 0xb9, 0xd0, 0x9c, 0xec, 0x82, 0x12, - 0xb9, 0x81, 0x9a, 0xf7, 0x04, 0x90, 
0x65, 0xef, 0x78, 0x01, 0x0b, 0x39, 0xaf, 0x48, 0x50, 0x3a, - 0xff, 0x08, 0xde, 0xca, 0x3d, 0xf7, 0x57, 0xda, 0xfb, 0x19, 0x0d, 0x37, 0x67, 0x51, 0xac, 0xbf, - 0x4e, 0xba, 0xac, 0x27, 0x46, 0x3c, 0x12, 0xdf, 0xf0, 0x3b, 0xb7, 0x4b, 0xd3, 0x90, 0xa4, 0x78, - 0x84, 0x63, 0x94, 0x1d, 0x8d, 0x7d, 0x1e, 0x09, 0x9e, 0x6d, 0xf6, 0xee, 0x86, 0xf1, 0x9f, 0xfc, - 0x0d, 0x00, 0x00, 0xff, 0xff, 0x58, 0xe3, 0xdf, 0xd7, 0xee, 0x07, 0x00, 0x00, -} diff --git a/protos/generated/go/feast/core/DatasetService.pb.go b/protos/generated/go/feast/core/DatasetService.pb.go deleted file mode 100644 index d2d3b01c932..00000000000 --- a/protos/generated/go/feast/core/DatasetService.pb.go +++ /dev/null @@ -1,388 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: feast/core/DatasetService.proto - -package core // import "github.com/gojek/feast/protos/generated/go/feast/core" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import timestamp "github.com/golang/protobuf/ptypes/timestamp" - -import ( - context "golang.org/x/net/context" - grpc "google.golang.org/grpc" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type DatasetServiceTypes struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *DatasetServiceTypes) Reset() { *m = DatasetServiceTypes{} } -func (m *DatasetServiceTypes) String() string { return proto.CompactTextString(m) } -func (*DatasetServiceTypes) ProtoMessage() {} -func (*DatasetServiceTypes) Descriptor() ([]byte, []int) { - return fileDescriptor_DatasetService_37ae639a8c7b5dd5, []int{0} -} -func (m *DatasetServiceTypes) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_DatasetServiceTypes.Unmarshal(m, b) -} -func (m *DatasetServiceTypes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_DatasetServiceTypes.Marshal(b, m, deterministic) -} -func (dst *DatasetServiceTypes) XXX_Merge(src proto.Message) { - xxx_messageInfo_DatasetServiceTypes.Merge(dst, src) -} -func (m *DatasetServiceTypes) XXX_Size() int { - return xxx_messageInfo_DatasetServiceTypes.Size(m) -} -func (m *DatasetServiceTypes) XXX_DiscardUnknown() { - xxx_messageInfo_DatasetServiceTypes.DiscardUnknown(m) -} - -var xxx_messageInfo_DatasetServiceTypes proto.InternalMessageInfo - -type DatasetServiceTypes_CreateDatasetRequest struct { - // set of features for which its training data should be created - FeatureSet *FeatureSet `protobuf:"bytes,1,opt,name=featureSet,proto3" json:"featureSet,omitempty"` - // start date of the training data (inclusive) - StartDate *timestamp.Timestamp `protobuf:"bytes,2,opt,name=startDate,proto3" json:"startDate,omitempty"` - // end date of the training data (inclusive) - EndDate *timestamp.Timestamp `protobuf:"bytes,3,opt,name=endDate,proto3" json:"endDate,omitempty"` - // (optional) number of row that should be generated - // (default: none) - Limit int64 `protobuf:"varint,4,opt,name=limit,proto3" json:"limit,omitempty"` - // (optional) prefix for dataset name - NamePrefix string 
`protobuf:"bytes,5,opt,name=namePrefix,proto3" json:"namePrefix,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *DatasetServiceTypes_CreateDatasetRequest) Reset() { - *m = DatasetServiceTypes_CreateDatasetRequest{} -} -func (m *DatasetServiceTypes_CreateDatasetRequest) String() string { return proto.CompactTextString(m) } -func (*DatasetServiceTypes_CreateDatasetRequest) ProtoMessage() {} -func (*DatasetServiceTypes_CreateDatasetRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_DatasetService_37ae639a8c7b5dd5, []int{0, 0} -} -func (m *DatasetServiceTypes_CreateDatasetRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_DatasetServiceTypes_CreateDatasetRequest.Unmarshal(m, b) -} -func (m *DatasetServiceTypes_CreateDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_DatasetServiceTypes_CreateDatasetRequest.Marshal(b, m, deterministic) -} -func (dst *DatasetServiceTypes_CreateDatasetRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_DatasetServiceTypes_CreateDatasetRequest.Merge(dst, src) -} -func (m *DatasetServiceTypes_CreateDatasetRequest) XXX_Size() int { - return xxx_messageInfo_DatasetServiceTypes_CreateDatasetRequest.Size(m) -} -func (m *DatasetServiceTypes_CreateDatasetRequest) XXX_DiscardUnknown() { - xxx_messageInfo_DatasetServiceTypes_CreateDatasetRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_DatasetServiceTypes_CreateDatasetRequest proto.InternalMessageInfo - -func (m *DatasetServiceTypes_CreateDatasetRequest) GetFeatureSet() *FeatureSet { - if m != nil { - return m.FeatureSet - } - return nil -} - -func (m *DatasetServiceTypes_CreateDatasetRequest) GetStartDate() *timestamp.Timestamp { - if m != nil { - return m.StartDate - } - return nil -} - -func (m *DatasetServiceTypes_CreateDatasetRequest) GetEndDate() *timestamp.Timestamp { - if m != nil { - return m.EndDate - } - return nil -} - -func (m *DatasetServiceTypes_CreateDatasetRequest) GetLimit() int64 { - if m != nil { - return m.Limit - } - return 0 -} - -func (m *DatasetServiceTypes_CreateDatasetRequest) GetNamePrefix() string { - if m != nil { - return m.NamePrefix - } - return "" -} - -type DatasetServiceTypes_CreateDatasetResponse struct { - // information of the created training dataset - DatasetInfo *DatasetInfo `protobuf:"bytes,1,opt,name=datasetInfo,proto3" json:"datasetInfo,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *DatasetServiceTypes_CreateDatasetResponse) Reset() { - *m = DatasetServiceTypes_CreateDatasetResponse{} -} -func (m *DatasetServiceTypes_CreateDatasetResponse) String() string { return proto.CompactTextString(m) } -func (*DatasetServiceTypes_CreateDatasetResponse) ProtoMessage() {} -func (*DatasetServiceTypes_CreateDatasetResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_DatasetService_37ae639a8c7b5dd5, []int{0, 1} -} -func (m *DatasetServiceTypes_CreateDatasetResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_DatasetServiceTypes_CreateDatasetResponse.Unmarshal(m, b) -} -func (m *DatasetServiceTypes_CreateDatasetResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_DatasetServiceTypes_CreateDatasetResponse.Marshal(b, m, deterministic) -} -func (dst *DatasetServiceTypes_CreateDatasetResponse) XXX_Merge(src proto.Message) { - 
xxx_messageInfo_DatasetServiceTypes_CreateDatasetResponse.Merge(dst, src) -} -func (m *DatasetServiceTypes_CreateDatasetResponse) XXX_Size() int { - return xxx_messageInfo_DatasetServiceTypes_CreateDatasetResponse.Size(m) -} -func (m *DatasetServiceTypes_CreateDatasetResponse) XXX_DiscardUnknown() { - xxx_messageInfo_DatasetServiceTypes_CreateDatasetResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_DatasetServiceTypes_CreateDatasetResponse proto.InternalMessageInfo - -func (m *DatasetServiceTypes_CreateDatasetResponse) GetDatasetInfo() *DatasetInfo { - if m != nil { - return m.DatasetInfo - } - return nil -} - -// Represent a collection of feature having same entity name -type FeatureSet struct { - // entity related to this feature set - EntityName string `protobuf:"bytes,1,opt,name=entityName,proto3" json:"entityName,omitempty"` - // list of feature id in this feature set - FeatureIds []string `protobuf:"bytes,2,rep,name=featureIds,proto3" json:"featureIds,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *FeatureSet) Reset() { *m = FeatureSet{} } -func (m *FeatureSet) String() string { return proto.CompactTextString(m) } -func (*FeatureSet) ProtoMessage() {} -func (*FeatureSet) Descriptor() ([]byte, []int) { - return fileDescriptor_DatasetService_37ae639a8c7b5dd5, []int{1} -} -func (m *FeatureSet) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_FeatureSet.Unmarshal(m, b) -} -func (m *FeatureSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_FeatureSet.Marshal(b, m, deterministic) -} -func (dst *FeatureSet) XXX_Merge(src proto.Message) { - xxx_messageInfo_FeatureSet.Merge(dst, src) -} -func (m *FeatureSet) XXX_Size() int { - return xxx_messageInfo_FeatureSet.Size(m) -} -func (m *FeatureSet) XXX_DiscardUnknown() { - xxx_messageInfo_FeatureSet.DiscardUnknown(m) -} - -var xxx_messageInfo_FeatureSet proto.InternalMessageInfo - -func (m *FeatureSet) GetEntityName() string { - if m != nil { - return m.EntityName - } - return "" -} - -func (m *FeatureSet) GetFeatureIds() []string { - if m != nil { - return m.FeatureIds - } - return nil -} - -// Representation of training dataset information -type DatasetInfo struct { - // name of dataset - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - // URL to table location of the training dataset - TableUrl string `protobuf:"bytes,2,opt,name=tableUrl,proto3" json:"tableUrl,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *DatasetInfo) Reset() { *m = DatasetInfo{} } -func (m *DatasetInfo) String() string { return proto.CompactTextString(m) } -func (*DatasetInfo) ProtoMessage() {} -func (*DatasetInfo) Descriptor() ([]byte, []int) { - return fileDescriptor_DatasetService_37ae639a8c7b5dd5, []int{2} -} -func (m *DatasetInfo) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_DatasetInfo.Unmarshal(m, b) -} -func (m *DatasetInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_DatasetInfo.Marshal(b, m, deterministic) -} -func (dst *DatasetInfo) XXX_Merge(src proto.Message) { - xxx_messageInfo_DatasetInfo.Merge(dst, src) -} -func (m *DatasetInfo) XXX_Size() int { - return xxx_messageInfo_DatasetInfo.Size(m) -} -func (m *DatasetInfo) XXX_DiscardUnknown() { - xxx_messageInfo_DatasetInfo.DiscardUnknown(m) -} - -var xxx_messageInfo_DatasetInfo proto.InternalMessageInfo - 
-func (m *DatasetInfo) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *DatasetInfo) GetTableUrl() string { - if m != nil { - return m.TableUrl - } - return "" -} - -func init() { - proto.RegisterType((*DatasetServiceTypes)(nil), "feast.core.DatasetServiceTypes") - proto.RegisterType((*DatasetServiceTypes_CreateDatasetRequest)(nil), "feast.core.DatasetServiceTypes.CreateDatasetRequest") - proto.RegisterType((*DatasetServiceTypes_CreateDatasetResponse)(nil), "feast.core.DatasetServiceTypes.CreateDatasetResponse") - proto.RegisterType((*FeatureSet)(nil), "feast.core.FeatureSet") - proto.RegisterType((*DatasetInfo)(nil), "feast.core.DatasetInfo") -} - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion4 - -// DatasetServiceClient is the client API for DatasetService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type DatasetServiceClient interface { - // Create training dataset for a feature set - CreateDataset(ctx context.Context, in *DatasetServiceTypes_CreateDatasetRequest, opts ...grpc.CallOption) (*DatasetServiceTypes_CreateDatasetResponse, error) -} - -type datasetServiceClient struct { - cc *grpc.ClientConn -} - -func NewDatasetServiceClient(cc *grpc.ClientConn) DatasetServiceClient { - return &datasetServiceClient{cc} -} - -func (c *datasetServiceClient) CreateDataset(ctx context.Context, in *DatasetServiceTypes_CreateDatasetRequest, opts ...grpc.CallOption) (*DatasetServiceTypes_CreateDatasetResponse, error) { - out := new(DatasetServiceTypes_CreateDatasetResponse) - err := c.cc.Invoke(ctx, "/feast.core.DatasetService/CreateDataset", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// DatasetServiceServer is the server API for DatasetService service. 
-type DatasetServiceServer interface { - // Create training dataset for a feature set - CreateDataset(context.Context, *DatasetServiceTypes_CreateDatasetRequest) (*DatasetServiceTypes_CreateDatasetResponse, error) -} - -func RegisterDatasetServiceServer(s *grpc.Server, srv DatasetServiceServer) { - s.RegisterService(&_DatasetService_serviceDesc, srv) -} - -func _DatasetService_CreateDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(DatasetServiceTypes_CreateDatasetRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(DatasetServiceServer).CreateDataset(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.DatasetService/CreateDataset", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(DatasetServiceServer).CreateDataset(ctx, req.(*DatasetServiceTypes_CreateDatasetRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _DatasetService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "feast.core.DatasetService", - HandlerType: (*DatasetServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "CreateDataset", - Handler: _DatasetService_CreateDataset_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "feast/core/DatasetService.proto", -} - -func init() { - proto.RegisterFile("feast/core/DatasetService.proto", fileDescriptor_DatasetService_37ae639a8c7b5dd5) -} - -var fileDescriptor_DatasetService_37ae639a8c7b5dd5 = []byte{ - // 414 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0xc1, 0x6e, 0xd4, 0x30, - 0x10, 0x25, 0xbb, 0x2d, 0x90, 0x59, 0xc1, 0xc1, 0x14, 0x88, 0x72, 0xa0, 0x51, 0x4e, 0x7b, 0xb2, - 0xa5, 0xd2, 0x22, 0x38, 0x70, 0x29, 0x2b, 0xa4, 0x4a, 0x08, 0x55, 0x6e, 0x91, 0x10, 0x37, 0x67, - 0x33, 0x09, 0x81, 0x24, 0x0e, 0xf6, 0x04, 0xd1, 0x0b, 0xdf, 0xc0, 0x27, 0xf2, 0x11, 0x7c, 0x00, - 0x8a, 0xb3, 0xdb, 0x78, 0xab, 0x4a, 0x88, 0x9b, 0x3d, 0xef, 0xcd, 0x78, 0xde, 0xf8, 0x0d, 0x1c, - 0x16, 0xa8, 0x2c, 0x89, 0xb5, 0x36, 0x28, 0x56, 0x8a, 0x94, 0x45, 0xba, 0x40, 0xf3, 0xbd, 0x5a, - 0x23, 0xef, 0x8c, 0x26, 0xcd, 0xc0, 0x11, 0xf8, 0x40, 0x88, 0x0f, 0x4b, 0xad, 0xcb, 0x1a, 0x85, - 0x43, 0xb2, 0xbe, 0x10, 0x54, 0x35, 0x68, 0x49, 0x35, 0xdd, 0x48, 0x4e, 0x7f, 0xcf, 0xe0, 0xd1, - 0x6e, 0x95, 0xcb, 0xab, 0x0e, 0x6d, 0xfc, 0x27, 0x80, 0x83, 0x37, 0x06, 0x15, 0xe1, 0x06, 0x95, - 0xf8, 0xad, 0x47, 0x4b, 0xec, 0x05, 0x0c, 0xf5, 0xa9, 0x37, 0x78, 0x81, 0x14, 0x05, 0x49, 0xb0, - 0x5c, 0x1c, 0x3d, 0xe1, 0xd3, 0x93, 0xfc, 0xed, 0x35, 0x2a, 0x3d, 0x26, 0x7b, 0x09, 0xa1, 0x25, - 0x65, 0x68, 0xa5, 0x08, 0xa3, 0x99, 0x4b, 0x8b, 0xf9, 0xd8, 0x1d, 0xdf, 0x76, 0xc7, 0x2f, 0xb7, - 0xdd, 0xc9, 0x89, 0xcc, 0x8e, 0xe1, 0x1e, 0xb6, 0xb9, 0xcb, 0x9b, 0xff, 0x33, 0x6f, 0x4b, 0x65, - 0x07, 0xb0, 0x5f, 0x57, 0x4d, 0x45, 0xd1, 0x5e, 0x12, 0x2c, 0xe7, 0x72, 0xbc, 0xb0, 0x67, 0x00, - 0xad, 0x6a, 0xf0, 0xdc, 0x60, 0x51, 0xfd, 0x88, 0xf6, 0x93, 0x60, 0x19, 0x4a, 0x2f, 0x12, 0x4b, - 0x78, 0x7c, 0x43, 0xb5, 0xed, 0x74, 0x6b, 0x91, 0xbd, 0x82, 0x45, 0x3e, 0x86, 0xce, 0xda, 0x42, - 0x6f, 0x74, 0x3f, 0xf5, 0x75, 0xaf, 0x26, 0x58, 0xfa, 0xdc, 0xf4, 0x1d, 0xc0, 0x34, 0x93, 0xa1, - 0x03, 0x6c, 0xa9, 0xa2, 0xab, 0xf7, 0xaa, 0x41, 0x57, 0x27, 0x94, 0x5e, 0x64, 0xc0, 0x37, 0x53, - 0x3b, 0xcb, 0x6d, 0x34, 0x4b, 0xe6, 0x03, 0x3e, 0x45, 0xd2, 0xd7, 0xb0, 0xf0, 0x5e, 0x62, 0x0c, - 0xf6, 0xda, 0xa9, 0x90, 
0x3b, 0xb3, 0x18, 0xee, 0x93, 0xca, 0x6a, 0xfc, 0x60, 0x6a, 0x37, 0xe9, - 0x50, 0x5e, 0xdf, 0x8f, 0x7e, 0x05, 0xf0, 0x70, 0xf7, 0xbf, 0xd9, 0x4f, 0x78, 0xb0, 0xa3, 0x99, - 0x1d, 0xdf, 0x22, 0xcb, 0x37, 0x07, 0xbf, 0xcd, 0x18, 0xf1, 0xc9, 0x7f, 0x66, 0x8d, 0x83, 0x4d, - 0xef, 0x9c, 0x7e, 0x04, 0xcf, 0xb1, 0xa7, 0x37, 0xdc, 0x78, 0x3e, 0x7c, 0xf1, 0xa7, 0x93, 0xb2, - 0xa2, 0xcf, 0x7d, 0xc6, 0xd7, 0xba, 0x11, 0xa5, 0xfe, 0x82, 0x5f, 0xc5, 0xb8, 0x04, 0xce, 0x00, - 0x56, 0x94, 0xd8, 0xa2, 0x51, 0x84, 0xb9, 0x28, 0xb5, 0x98, 0xd6, 0x23, 0xbb, 0xeb, 0xf0, 0xe7, - 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xbb, 0x9e, 0x4d, 0xe0, 0x33, 0x03, 0x00, 0x00, -} diff --git a/protos/generated/go/feast/core/JobService.pb.go b/protos/generated/go/feast/core/JobService.pb.go deleted file mode 100644 index 6a3a4e6dc0d..00000000000 --- a/protos/generated/go/feast/core/JobService.pb.go +++ /dev/null @@ -1,686 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: feast/core/JobService.proto - -package core // import "github.com/gojek/feast/protos/generated/go/feast/core" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import specs "github.com/gojek/feast/protos/generated/go/feast/specs" -import empty "github.com/golang/protobuf/ptypes/empty" -import timestamp "github.com/golang/protobuf/ptypes/timestamp" - -import ( - context "golang.org/x/net/context" - grpc "google.golang.org/grpc" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type JobServiceTypes struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *JobServiceTypes) Reset() { *m = JobServiceTypes{} } -func (m *JobServiceTypes) String() string { return proto.CompactTextString(m) } -func (*JobServiceTypes) ProtoMessage() {} -func (*JobServiceTypes) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_edcd183b773c9f62, []int{0} -} -func (m *JobServiceTypes) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_JobServiceTypes.Unmarshal(m, b) -} -func (m *JobServiceTypes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_JobServiceTypes.Marshal(b, m, deterministic) -} -func (dst *JobServiceTypes) XXX_Merge(src proto.Message) { - xxx_messageInfo_JobServiceTypes.Merge(dst, src) -} -func (m *JobServiceTypes) XXX_Size() int { - return xxx_messageInfo_JobServiceTypes.Size(m) -} -func (m *JobServiceTypes) XXX_DiscardUnknown() { - xxx_messageInfo_JobServiceTypes.DiscardUnknown(m) -} - -var xxx_messageInfo_JobServiceTypes proto.InternalMessageInfo - -type JobServiceTypes_SubmitImportJobRequest struct { - ImportSpec *specs.ImportSpec `protobuf:"bytes,1,opt,name=importSpec,proto3" json:"importSpec,omitempty"` - Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *JobServiceTypes_SubmitImportJobRequest) Reset() { - *m = JobServiceTypes_SubmitImportJobRequest{} -} -func (m *JobServiceTypes_SubmitImportJobRequest) String() string { return proto.CompactTextString(m) } -func (*JobServiceTypes_SubmitImportJobRequest) ProtoMessage() {} -func (*JobServiceTypes_SubmitImportJobRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 0} -} -func (m *JobServiceTypes_SubmitImportJobRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_JobServiceTypes_SubmitImportJobRequest.Unmarshal(m, b) -} -func (m *JobServiceTypes_SubmitImportJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_JobServiceTypes_SubmitImportJobRequest.Marshal(b, m, deterministic) -} -func (dst *JobServiceTypes_SubmitImportJobRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_JobServiceTypes_SubmitImportJobRequest.Merge(dst, src) -} -func (m *JobServiceTypes_SubmitImportJobRequest) XXX_Size() int { - return xxx_messageInfo_JobServiceTypes_SubmitImportJobRequest.Size(m) -} -func (m *JobServiceTypes_SubmitImportJobRequest) XXX_DiscardUnknown() { - xxx_messageInfo_JobServiceTypes_SubmitImportJobRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_JobServiceTypes_SubmitImportJobRequest proto.InternalMessageInfo - -func (m *JobServiceTypes_SubmitImportJobRequest) GetImportSpec() *specs.ImportSpec { - if m != nil { - return m.ImportSpec - } - return nil -} - -func (m *JobServiceTypes_SubmitImportJobRequest) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -type JobServiceTypes_SubmitImportJobResponse struct { - JobId string `protobuf:"bytes,1,opt,name=jobId,proto3" json:"jobId,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *JobServiceTypes_SubmitImportJobResponse) Reset() { - *m = JobServiceTypes_SubmitImportJobResponse{} -} -func (m 
*JobServiceTypes_SubmitImportJobResponse) String() string { return proto.CompactTextString(m) } -func (*JobServiceTypes_SubmitImportJobResponse) ProtoMessage() {} -func (*JobServiceTypes_SubmitImportJobResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 1} -} -func (m *JobServiceTypes_SubmitImportJobResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_JobServiceTypes_SubmitImportJobResponse.Unmarshal(m, b) -} -func (m *JobServiceTypes_SubmitImportJobResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_JobServiceTypes_SubmitImportJobResponse.Marshal(b, m, deterministic) -} -func (dst *JobServiceTypes_SubmitImportJobResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_JobServiceTypes_SubmitImportJobResponse.Merge(dst, src) -} -func (m *JobServiceTypes_SubmitImportJobResponse) XXX_Size() int { - return xxx_messageInfo_JobServiceTypes_SubmitImportJobResponse.Size(m) -} -func (m *JobServiceTypes_SubmitImportJobResponse) XXX_DiscardUnknown() { - xxx_messageInfo_JobServiceTypes_SubmitImportJobResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_JobServiceTypes_SubmitImportJobResponse proto.InternalMessageInfo - -func (m *JobServiceTypes_SubmitImportJobResponse) GetJobId() string { - if m != nil { - return m.JobId - } - return "" -} - -type JobServiceTypes_ListJobsResponse struct { - Jobs []*JobServiceTypes_JobDetail `protobuf:"bytes,1,rep,name=jobs,proto3" json:"jobs,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *JobServiceTypes_ListJobsResponse) Reset() { *m = JobServiceTypes_ListJobsResponse{} } -func (m *JobServiceTypes_ListJobsResponse) String() string { return proto.CompactTextString(m) } -func (*JobServiceTypes_ListJobsResponse) ProtoMessage() {} -func (*JobServiceTypes_ListJobsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 2} -} -func (m *JobServiceTypes_ListJobsResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_JobServiceTypes_ListJobsResponse.Unmarshal(m, b) -} -func (m *JobServiceTypes_ListJobsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_JobServiceTypes_ListJobsResponse.Marshal(b, m, deterministic) -} -func (dst *JobServiceTypes_ListJobsResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_JobServiceTypes_ListJobsResponse.Merge(dst, src) -} -func (m *JobServiceTypes_ListJobsResponse) XXX_Size() int { - return xxx_messageInfo_JobServiceTypes_ListJobsResponse.Size(m) -} -func (m *JobServiceTypes_ListJobsResponse) XXX_DiscardUnknown() { - xxx_messageInfo_JobServiceTypes_ListJobsResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_JobServiceTypes_ListJobsResponse proto.InternalMessageInfo - -func (m *JobServiceTypes_ListJobsResponse) GetJobs() []*JobServiceTypes_JobDetail { - if m != nil { - return m.Jobs - } - return nil -} - -type JobServiceTypes_GetJobRequest struct { - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *JobServiceTypes_GetJobRequest) Reset() { *m = JobServiceTypes_GetJobRequest{} } -func (m *JobServiceTypes_GetJobRequest) String() string { return proto.CompactTextString(m) } -func (*JobServiceTypes_GetJobRequest) ProtoMessage() {} -func (*JobServiceTypes_GetJobRequest) Descriptor() ([]byte, []int) { 
- return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 3} -} -func (m *JobServiceTypes_GetJobRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_JobServiceTypes_GetJobRequest.Unmarshal(m, b) -} -func (m *JobServiceTypes_GetJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_JobServiceTypes_GetJobRequest.Marshal(b, m, deterministic) -} -func (dst *JobServiceTypes_GetJobRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_JobServiceTypes_GetJobRequest.Merge(dst, src) -} -func (m *JobServiceTypes_GetJobRequest) XXX_Size() int { - return xxx_messageInfo_JobServiceTypes_GetJobRequest.Size(m) -} -func (m *JobServiceTypes_GetJobRequest) XXX_DiscardUnknown() { - xxx_messageInfo_JobServiceTypes_GetJobRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_JobServiceTypes_GetJobRequest proto.InternalMessageInfo - -func (m *JobServiceTypes_GetJobRequest) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -type JobServiceTypes_GetJobResponse struct { - Job *JobServiceTypes_JobDetail `protobuf:"bytes,1,opt,name=job,proto3" json:"job,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *JobServiceTypes_GetJobResponse) Reset() { *m = JobServiceTypes_GetJobResponse{} } -func (m *JobServiceTypes_GetJobResponse) String() string { return proto.CompactTextString(m) } -func (*JobServiceTypes_GetJobResponse) ProtoMessage() {} -func (*JobServiceTypes_GetJobResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 4} -} -func (m *JobServiceTypes_GetJobResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_JobServiceTypes_GetJobResponse.Unmarshal(m, b) -} -func (m *JobServiceTypes_GetJobResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_JobServiceTypes_GetJobResponse.Marshal(b, m, deterministic) -} -func (dst *JobServiceTypes_GetJobResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_JobServiceTypes_GetJobResponse.Merge(dst, src) -} -func (m *JobServiceTypes_GetJobResponse) XXX_Size() int { - return xxx_messageInfo_JobServiceTypes_GetJobResponse.Size(m) -} -func (m *JobServiceTypes_GetJobResponse) XXX_DiscardUnknown() { - xxx_messageInfo_JobServiceTypes_GetJobResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_JobServiceTypes_GetJobResponse proto.InternalMessageInfo - -func (m *JobServiceTypes_GetJobResponse) GetJob() *JobServiceTypes_JobDetail { - if m != nil { - return m.Job - } - return nil -} - -type JobServiceTypes_AbortJobRequest struct { - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *JobServiceTypes_AbortJobRequest) Reset() { *m = JobServiceTypes_AbortJobRequest{} } -func (m *JobServiceTypes_AbortJobRequest) String() string { return proto.CompactTextString(m) } -func (*JobServiceTypes_AbortJobRequest) ProtoMessage() {} -func (*JobServiceTypes_AbortJobRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 5} -} -func (m *JobServiceTypes_AbortJobRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_JobServiceTypes_AbortJobRequest.Unmarshal(m, b) -} -func (m *JobServiceTypes_AbortJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_JobServiceTypes_AbortJobRequest.Marshal(b, m, 
deterministic) -} -func (dst *JobServiceTypes_AbortJobRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_JobServiceTypes_AbortJobRequest.Merge(dst, src) -} -func (m *JobServiceTypes_AbortJobRequest) XXX_Size() int { - return xxx_messageInfo_JobServiceTypes_AbortJobRequest.Size(m) -} -func (m *JobServiceTypes_AbortJobRequest) XXX_DiscardUnknown() { - xxx_messageInfo_JobServiceTypes_AbortJobRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_JobServiceTypes_AbortJobRequest proto.InternalMessageInfo - -func (m *JobServiceTypes_AbortJobRequest) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -type JobServiceTypes_AbortJobResponse struct { - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *JobServiceTypes_AbortJobResponse) Reset() { *m = JobServiceTypes_AbortJobResponse{} } -func (m *JobServiceTypes_AbortJobResponse) String() string { return proto.CompactTextString(m) } -func (*JobServiceTypes_AbortJobResponse) ProtoMessage() {} -func (*JobServiceTypes_AbortJobResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 6} -} -func (m *JobServiceTypes_AbortJobResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_JobServiceTypes_AbortJobResponse.Unmarshal(m, b) -} -func (m *JobServiceTypes_AbortJobResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_JobServiceTypes_AbortJobResponse.Marshal(b, m, deterministic) -} -func (dst *JobServiceTypes_AbortJobResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_JobServiceTypes_AbortJobResponse.Merge(dst, src) -} -func (m *JobServiceTypes_AbortJobResponse) XXX_Size() int { - return xxx_messageInfo_JobServiceTypes_AbortJobResponse.Size(m) -} -func (m *JobServiceTypes_AbortJobResponse) XXX_DiscardUnknown() { - xxx_messageInfo_JobServiceTypes_AbortJobResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_JobServiceTypes_AbortJobResponse proto.InternalMessageInfo - -func (m *JobServiceTypes_AbortJobResponse) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -// Expanded view of a given job. Returns job information, as well -// as latest metrics. 
-type JobServiceTypes_JobDetail struct { - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - ExtId string `protobuf:"bytes,2,opt,name=extId,proto3" json:"extId,omitempty"` - Type string `protobuf:"bytes,3,opt,name=type,proto3" json:"type,omitempty"` - Runner string `protobuf:"bytes,4,opt,name=runner,proto3" json:"runner,omitempty"` - Status string `protobuf:"bytes,5,opt,name=status,proto3" json:"status,omitempty"` - Entities []string `protobuf:"bytes,6,rep,name=entities,proto3" json:"entities,omitempty"` - Features []string `protobuf:"bytes,7,rep,name=features,proto3" json:"features,omitempty"` - Metrics map[string]float64 `protobuf:"bytes,8,rep,name=metrics,proto3" json:"metrics,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"fixed64,2,opt,name=value,proto3"` - LastUpdated *timestamp.Timestamp `protobuf:"bytes,9,opt,name=lastUpdated,proto3" json:"lastUpdated,omitempty"` - Created *timestamp.Timestamp `protobuf:"bytes,10,opt,name=created,proto3" json:"created,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *JobServiceTypes_JobDetail) Reset() { *m = JobServiceTypes_JobDetail{} } -func (m *JobServiceTypes_JobDetail) String() string { return proto.CompactTextString(m) } -func (*JobServiceTypes_JobDetail) ProtoMessage() {} -func (*JobServiceTypes_JobDetail) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 7} -} -func (m *JobServiceTypes_JobDetail) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_JobServiceTypes_JobDetail.Unmarshal(m, b) -} -func (m *JobServiceTypes_JobDetail) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_JobServiceTypes_JobDetail.Marshal(b, m, deterministic) -} -func (dst *JobServiceTypes_JobDetail) XXX_Merge(src proto.Message) { - xxx_messageInfo_JobServiceTypes_JobDetail.Merge(dst, src) -} -func (m *JobServiceTypes_JobDetail) XXX_Size() int { - return xxx_messageInfo_JobServiceTypes_JobDetail.Size(m) -} -func (m *JobServiceTypes_JobDetail) XXX_DiscardUnknown() { - xxx_messageInfo_JobServiceTypes_JobDetail.DiscardUnknown(m) -} - -var xxx_messageInfo_JobServiceTypes_JobDetail proto.InternalMessageInfo - -func (m *JobServiceTypes_JobDetail) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -func (m *JobServiceTypes_JobDetail) GetExtId() string { - if m != nil { - return m.ExtId - } - return "" -} - -func (m *JobServiceTypes_JobDetail) GetType() string { - if m != nil { - return m.Type - } - return "" -} - -func (m *JobServiceTypes_JobDetail) GetRunner() string { - if m != nil { - return m.Runner - } - return "" -} - -func (m *JobServiceTypes_JobDetail) GetStatus() string { - if m != nil { - return m.Status - } - return "" -} - -func (m *JobServiceTypes_JobDetail) GetEntities() []string { - if m != nil { - return m.Entities - } - return nil -} - -func (m *JobServiceTypes_JobDetail) GetFeatures() []string { - if m != nil { - return m.Features - } - return nil -} - -func (m *JobServiceTypes_JobDetail) GetMetrics() map[string]float64 { - if m != nil { - return m.Metrics - } - return nil -} - -func (m *JobServiceTypes_JobDetail) GetLastUpdated() *timestamp.Timestamp { - if m != nil { - return m.LastUpdated - } - return nil -} - -func (m *JobServiceTypes_JobDetail) GetCreated() *timestamp.Timestamp { - if m != nil { - return m.Created - } - return nil -} - -func init() { - proto.RegisterType((*JobServiceTypes)(nil), "feast.core.JobServiceTypes") 
- proto.RegisterType((*JobServiceTypes_SubmitImportJobRequest)(nil), "feast.core.JobServiceTypes.SubmitImportJobRequest") - proto.RegisterType((*JobServiceTypes_SubmitImportJobResponse)(nil), "feast.core.JobServiceTypes.SubmitImportJobResponse") - proto.RegisterType((*JobServiceTypes_ListJobsResponse)(nil), "feast.core.JobServiceTypes.ListJobsResponse") - proto.RegisterType((*JobServiceTypes_GetJobRequest)(nil), "feast.core.JobServiceTypes.GetJobRequest") - proto.RegisterType((*JobServiceTypes_GetJobResponse)(nil), "feast.core.JobServiceTypes.GetJobResponse") - proto.RegisterType((*JobServiceTypes_AbortJobRequest)(nil), "feast.core.JobServiceTypes.AbortJobRequest") - proto.RegisterType((*JobServiceTypes_AbortJobResponse)(nil), "feast.core.JobServiceTypes.AbortJobResponse") - proto.RegisterType((*JobServiceTypes_JobDetail)(nil), "feast.core.JobServiceTypes.JobDetail") - proto.RegisterMapType((map[string]float64)(nil), "feast.core.JobServiceTypes.JobDetail.MetricsEntry") -} - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion4 - -// JobServiceClient is the client API for JobService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type JobServiceClient interface { - // Submit a job to feast to run. Returns the job id. - SubmitJob(ctx context.Context, in *JobServiceTypes_SubmitImportJobRequest, opts ...grpc.CallOption) (*JobServiceTypes_SubmitImportJobResponse, error) - // List all jobs submitted to feast. - ListJobs(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*JobServiceTypes_ListJobsResponse, error) - // Get Job with ID - GetJob(ctx context.Context, in *JobServiceTypes_GetJobRequest, opts ...grpc.CallOption) (*JobServiceTypes_GetJobResponse, error) - // Abort job with given ID - AbortJob(ctx context.Context, in *JobServiceTypes_AbortJobRequest, opts ...grpc.CallOption) (*JobServiceTypes_AbortJobResponse, error) -} - -type jobServiceClient struct { - cc *grpc.ClientConn -} - -func NewJobServiceClient(cc *grpc.ClientConn) JobServiceClient { - return &jobServiceClient{cc} -} - -func (c *jobServiceClient) SubmitJob(ctx context.Context, in *JobServiceTypes_SubmitImportJobRequest, opts ...grpc.CallOption) (*JobServiceTypes_SubmitImportJobResponse, error) { - out := new(JobServiceTypes_SubmitImportJobResponse) - err := c.cc.Invoke(ctx, "/feast.core.JobService/SubmitJob", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *jobServiceClient) ListJobs(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*JobServiceTypes_ListJobsResponse, error) { - out := new(JobServiceTypes_ListJobsResponse) - err := c.cc.Invoke(ctx, "/feast.core.JobService/ListJobs", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *jobServiceClient) GetJob(ctx context.Context, in *JobServiceTypes_GetJobRequest, opts ...grpc.CallOption) (*JobServiceTypes_GetJobResponse, error) { - out := new(JobServiceTypes_GetJobResponse) - err := c.cc.Invoke(ctx, "/feast.core.JobService/GetJob", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *jobServiceClient) AbortJob(ctx context.Context, in *JobServiceTypes_AbortJobRequest, opts ...grpc.CallOption) (*JobServiceTypes_AbortJobResponse, error) { - out := new(JobServiceTypes_AbortJobResponse) - err := c.cc.Invoke(ctx, "/feast.core.JobService/AbortJob", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// JobServiceServer is the server API for JobService service. -type JobServiceServer interface { - // Submit a job to feast to run. Returns the job id. - SubmitJob(context.Context, *JobServiceTypes_SubmitImportJobRequest) (*JobServiceTypes_SubmitImportJobResponse, error) - // List all jobs submitted to feast. - ListJobs(context.Context, *empty.Empty) (*JobServiceTypes_ListJobsResponse, error) - // Get Job with ID - GetJob(context.Context, *JobServiceTypes_GetJobRequest) (*JobServiceTypes_GetJobResponse, error) - // Abort job with given ID - AbortJob(context.Context, *JobServiceTypes_AbortJobRequest) (*JobServiceTypes_AbortJobResponse, error) -} - -func RegisterJobServiceServer(s *grpc.Server, srv JobServiceServer) { - s.RegisterService(&_JobService_serviceDesc, srv) -} - -func _JobService_SubmitJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(JobServiceTypes_SubmitImportJobRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(JobServiceServer).SubmitJob(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.JobService/SubmitJob", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(JobServiceServer).SubmitJob(ctx, req.(*JobServiceTypes_SubmitImportJobRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _JobService_ListJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(empty.Empty) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(JobServiceServer).ListJobs(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.JobService/ListJobs", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(JobServiceServer).ListJobs(ctx, req.(*empty.Empty)) - } - return interceptor(ctx, in, info, handler) -} - -func _JobService_GetJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(JobServiceTypes_GetJobRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(JobServiceServer).GetJob(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.JobService/GetJob", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(JobServiceServer).GetJob(ctx, req.(*JobServiceTypes_GetJobRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _JobService_AbortJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(JobServiceTypes_AbortJobRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(JobServiceServer).AbortJob(ctx, in) - } - info := &grpc.UnaryServerInfo{ - 
Server: srv, - FullMethod: "/feast.core.JobService/AbortJob", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(JobServiceServer).AbortJob(ctx, req.(*JobServiceTypes_AbortJobRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _JobService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "feast.core.JobService", - HandlerType: (*JobServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "SubmitJob", - Handler: _JobService_SubmitJob_Handler, - }, - { - MethodName: "ListJobs", - Handler: _JobService_ListJobs_Handler, - }, - { - MethodName: "GetJob", - Handler: _JobService_GetJob_Handler, - }, - { - MethodName: "AbortJob", - Handler: _JobService_AbortJob_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "feast/core/JobService.proto", -} - -func init() { - proto.RegisterFile("feast/core/JobService.proto", fileDescriptor_JobService_edcd183b773c9f62) -} - -var fileDescriptor_JobService_edcd183b773c9f62 = []byte{ - // 621 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xdd, 0x4e, 0xdb, 0x4c, - 0x10, 0x55, 0x62, 0x08, 0xf1, 0xf0, 0x7d, 0x80, 0x56, 0x15, 0x58, 0x4b, 0x25, 0x52, 0xa4, 0x4a, - 0xe9, 0x8f, 0x6c, 0x29, 0xb4, 0xa2, 0x45, 0xbd, 0x29, 0x2a, 0xaa, 0x82, 0x40, 0x42, 0x86, 0xde, - 0xf4, 0xa6, 0xb2, 0x9d, 0xc1, 0xdd, 0x10, 0x7b, 0x5d, 0xef, 0x1a, 0x35, 0xef, 0xd2, 0x37, 0xe8, - 0x13, 0xf5, 0x6d, 0xaa, 0xdd, 0xb5, 0x63, 0x63, 0xaa, 0x94, 0xde, 0xf9, 0xec, 0x9c, 0xd9, 0xb3, - 0x73, 0x3c, 0x33, 0xb0, 0x7b, 0x8d, 0x81, 0x90, 0x5e, 0xc4, 0x73, 0xf4, 0x4e, 0x79, 0x78, 0x89, - 0xf9, 0x2d, 0x8b, 0xd0, 0xcd, 0x72, 0x2e, 0x39, 0x01, 0x1d, 0x74, 0x55, 0x90, 0x3e, 0x36, 0x44, - 0x91, 0x61, 0x24, 0xbc, 0x71, 0x92, 0xf1, 0x5c, 0x5e, 0x66, 0x18, 0x19, 0x26, 0xdd, 0x8d, 0x39, - 0x8f, 0x67, 0xe8, 0x69, 0x14, 0x16, 0xd7, 0x1e, 0x26, 0x99, 0x9c, 0x97, 0xc1, 0xbd, 0x76, 0x50, - 0xb2, 0x04, 0x85, 0x0c, 0x92, 0xcc, 0x10, 0xf6, 0x7f, 0xf5, 0x60, 0xb3, 0x16, 0xbf, 0x9a, 0x67, - 0x28, 0x28, 0xc2, 0xf6, 0x65, 0x11, 0x26, 0x4c, 0x1a, 0xad, 0x53, 0x1e, 0xfa, 0xf8, 0xad, 0x40, - 0x21, 0xc9, 0x21, 0x00, 0x5b, 0xe8, 0x3b, 0x9d, 0x41, 0x67, 0xb8, 0x3e, 0xda, 0x71, 0xcd, 0x53, - 0xf5, 0xf3, 0xdc, 0xfa, 0x79, 0x7e, 0x83, 0x4a, 0x08, 0xac, 0xa4, 0x41, 0x82, 0x4e, 0x77, 0xd0, - 0x19, 0xda, 0xbe, 0xfe, 0xa6, 0x1e, 0xec, 0xdc, 0x93, 0x11, 0x19, 0x4f, 0x05, 0x92, 0x47, 0xb0, - 0x3a, 0xe5, 0xe1, 0x78, 0xa2, 0x25, 0x6c, 0xdf, 0x00, 0x7a, 0x0e, 0x5b, 0x67, 0x4c, 0x28, 0xa2, - 0x58, 0x30, 0xdf, 0xc2, 0xca, 0x94, 0x87, 0xc2, 0xe9, 0x0c, 0xac, 0xe1, 0xfa, 0xe8, 0xa9, 0x5b, - 0xdb, 0xe6, 0xb6, 0xca, 0x52, 0xf8, 0x03, 0xca, 0x80, 0xcd, 0x7c, 0x9d, 0x42, 0xf7, 0xe0, 0xff, - 0x8f, 0xd8, 0xac, 0x6e, 0x03, 0xba, 0xac, 0x92, 0xec, 0xb2, 0x09, 0x1d, 0xc3, 0x46, 0x45, 0x28, - 0xd5, 0x0e, 0xc1, 0x9a, 0xf2, 0xb0, 0x2c, 0xfc, 0x81, 0x62, 0x2a, 0x83, 0x3e, 0x81, 0xcd, 0xf7, - 0xe1, 0x5d, 0x2f, 0xdb, 0x6a, 0xfb, 0xb0, 0x55, 0x53, 0x4a, 0xbd, 0x36, 0xe7, 0xa7, 0x05, 0xf6, - 0xe2, 0xe6, 0x76, 0x54, 0xb9, 0x86, 0xdf, 0xe5, 0x78, 0x52, 0xba, 0x6c, 0x80, 0xb2, 0x5e, 0xce, - 0x33, 0x74, 0x2c, 0x63, 0xbd, 0xfa, 0x26, 0xdb, 0xd0, 0xcb, 0x8b, 0x34, 0xc5, 0xdc, 0x59, 0xd1, - 0xa7, 0x25, 0x52, 0xe7, 0x42, 0x06, 0xb2, 0x10, 0xce, 0xaa, 0x39, 0x37, 0x88, 0x50, 0xe8, 0x63, - 0x2a, 0x99, 0x64, 0x28, 0x9c, 0xde, 0xc0, 0x1a, 0xda, 0xfe, 0x02, 0xab, 0xd8, 0x35, 0x06, 0xb2, - 0xc8, 0x51, 0x38, 0x6b, 0x26, 0x56, 0x61, 0x72, 0x06, 0x6b, 0x09, 0xca, 0x9c, 0x45, 0xc2, 0xe9, - 0xeb, 0x1f, 0x34, 0x7a, 0x90, 0x67, 0xee, 0xb9, 
0x49, 0x3a, 0x49, 0x65, 0x3e, 0xf7, 0xab, 0x2b, - 0xc8, 0x3b, 0x58, 0x9f, 0x05, 0x42, 0x7e, 0xca, 0x26, 0x81, 0xc4, 0x89, 0x63, 0xeb, 0xbf, 0x40, - 0x5d, 0xd3, 0xe2, 0x6e, 0xd5, 0xe2, 0xee, 0x55, 0xd5, 0xe2, 0x7e, 0x93, 0x4e, 0x5e, 0xc1, 0x5a, - 0x94, 0xa3, 0xce, 0x84, 0xbf, 0x66, 0x56, 0x54, 0x7a, 0x04, 0xff, 0x35, 0x1f, 0x43, 0xb6, 0xc0, - 0xba, 0xc1, 0x79, 0x69, 0xba, 0xfa, 0x54, 0xae, 0xdf, 0x06, 0xb3, 0xc2, 0xf4, 0x76, 0xc7, 0x37, - 0xe0, 0xa8, 0xfb, 0xa6, 0x33, 0xfa, 0x61, 0x01, 0xd4, 0x35, 0x12, 0x09, 0xb6, 0xe9, 0xf7, 0x53, - 0x1e, 0x92, 0xa5, 0x46, 0xfc, 0x79, 0xfa, 0xe8, 0xc1, 0x3f, 0xe5, 0x94, 0x2d, 0x74, 0x01, 0xfd, - 0x6a, 0x68, 0xc8, 0xf6, 0xbd, 0x8a, 0x4f, 0xd4, 0xae, 0xa0, 0x2f, 0x97, 0x5d, 0x7c, 0x6f, 0xe4, - 0xbe, 0x40, 0xcf, 0x8c, 0x05, 0x79, 0xb6, 0x2c, 0xef, 0xce, 0x6c, 0xd1, 0xe7, 0x0f, 0xa1, 0x96, - 0x02, 0x08, 0xfd, 0x6a, 0x12, 0xc8, 0x8b, 0x65, 0x79, 0xad, 0x91, 0x5a, 0x5e, 0x47, 0x7b, 0xb8, - 0x8e, 0xaf, 0xa0, 0xb1, 0x64, 0x8f, 0x1b, 0x5b, 0xf0, 0x42, 0xb9, 0xf3, 0xf9, 0x75, 0xcc, 0xe4, - 0xd7, 0x22, 0x74, 0x23, 0x9e, 0x78, 0x31, 0x9f, 0xe2, 0x8d, 0x67, 0xd6, 0xb0, 0xf6, 0x4e, 0x78, - 0x31, 0xa6, 0x98, 0xab, 0x16, 0xf1, 0x62, 0xee, 0xd5, 0x9b, 0x3c, 0xec, 0xe9, 0xf8, 0xc1, 0xef, - 0x00, 0x00, 0x00, 0xff, 0xff, 0xc7, 0xf4, 0xb6, 0x7c, 0xde, 0x05, 0x00, 0x00, -} diff --git a/protos/generated/go/feast/core/UIService.pb.go b/protos/generated/go/feast/core/UIService.pb.go deleted file mode 100644 index 772c548c875..00000000000 --- a/protos/generated/go/feast/core/UIService.pb.go +++ /dev/null @@ -1,1177 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: feast/core/UIService.proto - -package core // import "github.com/gojek/feast/protos/generated/go/feast/core" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import specs "github.com/gojek/feast/protos/generated/go/feast/specs" -import empty "github.com/golang/protobuf/ptypes/empty" -import timestamp "github.com/golang/protobuf/ptypes/timestamp" - -import ( - context "golang.org/x/net/context" - grpc "google.golang.org/grpc" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type UIServiceTypes struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UIServiceTypes) Reset() { *m = UIServiceTypes{} } -func (m *UIServiceTypes) String() string { return proto.CompactTextString(m) } -func (*UIServiceTypes) ProtoMessage() {} -func (*UIServiceTypes) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0} -} -func (m *UIServiceTypes) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UIServiceTypes.Unmarshal(m, b) -} -func (m *UIServiceTypes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UIServiceTypes.Marshal(b, m, deterministic) -} -func (dst *UIServiceTypes) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes.Merge(dst, src) -} -func (m *UIServiceTypes) XXX_Size() int { - return xxx_messageInfo_UIServiceTypes.Size(m) -} -func (m *UIServiceTypes) XXX_DiscardUnknown() { - xxx_messageInfo_UIServiceTypes.DiscardUnknown(m) -} - -var xxx_messageInfo_UIServiceTypes proto.InternalMessageInfo - -// Expanded entity spec -type UIServiceTypes_EntityDetail struct { - Spec *specs.EntitySpec `protobuf:"bytes,1,opt,name=spec,proto3" json:"spec,omitempty"` - Jobs []string `protobuf:"bytes,2,rep,name=jobs,proto3" json:"jobs,omitempty"` - LastUpdated *timestamp.Timestamp `protobuf:"bytes,3,opt,name=lastUpdated,proto3" json:"lastUpdated,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UIServiceTypes_EntityDetail) Reset() { *m = UIServiceTypes_EntityDetail{} } -func (m *UIServiceTypes_EntityDetail) String() string { return proto.CompactTextString(m) } -func (*UIServiceTypes_EntityDetail) ProtoMessage() {} -func (*UIServiceTypes_EntityDetail) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 0} -} -func (m *UIServiceTypes_EntityDetail) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UIServiceTypes_EntityDetail.Unmarshal(m, b) -} -func (m *UIServiceTypes_EntityDetail) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UIServiceTypes_EntityDetail.Marshal(b, m, deterministic) -} -func (dst *UIServiceTypes_EntityDetail) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_EntityDetail.Merge(dst, src) -} -func (m *UIServiceTypes_EntityDetail) XXX_Size() int { - return xxx_messageInfo_UIServiceTypes_EntityDetail.Size(m) -} -func (m *UIServiceTypes_EntityDetail) XXX_DiscardUnknown() { - xxx_messageInfo_UIServiceTypes_EntityDetail.DiscardUnknown(m) -} - -var xxx_messageInfo_UIServiceTypes_EntityDetail proto.InternalMessageInfo - -func (m *UIServiceTypes_EntityDetail) GetSpec() *specs.EntitySpec { - if m != nil { - return m.Spec - } - return nil -} - -func (m *UIServiceTypes_EntityDetail) GetJobs() []string { - if m != nil { - return m.Jobs - } - return nil -} - -func (m *UIServiceTypes_EntityDetail) GetLastUpdated() *timestamp.Timestamp { - if m != nil { - return m.LastUpdated - } - return nil -} - -type UIServiceTypes_GetEntityRequest struct { - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UIServiceTypes_GetEntityRequest) Reset() { *m = UIServiceTypes_GetEntityRequest{} } -func (m 
*UIServiceTypes_GetEntityRequest) String() string { return proto.CompactTextString(m) } -func (*UIServiceTypes_GetEntityRequest) ProtoMessage() {} -func (*UIServiceTypes_GetEntityRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 1} -} -func (m *UIServiceTypes_GetEntityRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UIServiceTypes_GetEntityRequest.Unmarshal(m, b) -} -func (m *UIServiceTypes_GetEntityRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UIServiceTypes_GetEntityRequest.Marshal(b, m, deterministic) -} -func (dst *UIServiceTypes_GetEntityRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_GetEntityRequest.Merge(dst, src) -} -func (m *UIServiceTypes_GetEntityRequest) XXX_Size() int { - return xxx_messageInfo_UIServiceTypes_GetEntityRequest.Size(m) -} -func (m *UIServiceTypes_GetEntityRequest) XXX_DiscardUnknown() { - xxx_messageInfo_UIServiceTypes_GetEntityRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_UIServiceTypes_GetEntityRequest proto.InternalMessageInfo - -func (m *UIServiceTypes_GetEntityRequest) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -type UIServiceTypes_GetEntityResponse struct { - Entity *UIServiceTypes_EntityDetail `protobuf:"bytes,1,opt,name=entity,proto3" json:"entity,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UIServiceTypes_GetEntityResponse) Reset() { *m = UIServiceTypes_GetEntityResponse{} } -func (m *UIServiceTypes_GetEntityResponse) String() string { return proto.CompactTextString(m) } -func (*UIServiceTypes_GetEntityResponse) ProtoMessage() {} -func (*UIServiceTypes_GetEntityResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 2} -} -func (m *UIServiceTypes_GetEntityResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UIServiceTypes_GetEntityResponse.Unmarshal(m, b) -} -func (m *UIServiceTypes_GetEntityResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UIServiceTypes_GetEntityResponse.Marshal(b, m, deterministic) -} -func (dst *UIServiceTypes_GetEntityResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_GetEntityResponse.Merge(dst, src) -} -func (m *UIServiceTypes_GetEntityResponse) XXX_Size() int { - return xxx_messageInfo_UIServiceTypes_GetEntityResponse.Size(m) -} -func (m *UIServiceTypes_GetEntityResponse) XXX_DiscardUnknown() { - xxx_messageInfo_UIServiceTypes_GetEntityResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_UIServiceTypes_GetEntityResponse proto.InternalMessageInfo - -func (m *UIServiceTypes_GetEntityResponse) GetEntity() *UIServiceTypes_EntityDetail { - if m != nil { - return m.Entity - } - return nil -} - -type UIServiceTypes_ListEntitiesResponse struct { - Entities []*UIServiceTypes_EntityDetail `protobuf:"bytes,1,rep,name=entities,proto3" json:"entities,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UIServiceTypes_ListEntitiesResponse) Reset() { *m = UIServiceTypes_ListEntitiesResponse{} } -func (m *UIServiceTypes_ListEntitiesResponse) String() string { return proto.CompactTextString(m) } -func (*UIServiceTypes_ListEntitiesResponse) ProtoMessage() {} -func (*UIServiceTypes_ListEntitiesResponse) Descriptor() ([]byte, []int) { - return 
fileDescriptor_UIService_04866529701c634c, []int{0, 3} -} -func (m *UIServiceTypes_ListEntitiesResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UIServiceTypes_ListEntitiesResponse.Unmarshal(m, b) -} -func (m *UIServiceTypes_ListEntitiesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UIServiceTypes_ListEntitiesResponse.Marshal(b, m, deterministic) -} -func (dst *UIServiceTypes_ListEntitiesResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_ListEntitiesResponse.Merge(dst, src) -} -func (m *UIServiceTypes_ListEntitiesResponse) XXX_Size() int { - return xxx_messageInfo_UIServiceTypes_ListEntitiesResponse.Size(m) -} -func (m *UIServiceTypes_ListEntitiesResponse) XXX_DiscardUnknown() { - xxx_messageInfo_UIServiceTypes_ListEntitiesResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_UIServiceTypes_ListEntitiesResponse proto.InternalMessageInfo - -func (m *UIServiceTypes_ListEntitiesResponse) GetEntities() []*UIServiceTypes_EntityDetail { - if m != nil { - return m.Entities - } - return nil -} - -// Expanded feature spec -type UIServiceTypes_FeatureDetail struct { - Spec *specs.FeatureSpec `protobuf:"bytes,1,opt,name=spec,proto3" json:"spec,omitempty"` - BigqueryView string `protobuf:"bytes,2,opt,name=bigqueryView,proto3" json:"bigqueryView,omitempty"` - Enabled bool `protobuf:"varint,3,opt,name=enabled,proto3" json:"enabled,omitempty"` - Jobs []string `protobuf:"bytes,4,rep,name=jobs,proto3" json:"jobs,omitempty"` - LastUpdated *timestamp.Timestamp `protobuf:"bytes,5,opt,name=lastUpdated,proto3" json:"lastUpdated,omitempty"` - Created *timestamp.Timestamp `protobuf:"bytes,6,opt,name=created,proto3" json:"created,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UIServiceTypes_FeatureDetail) Reset() { *m = UIServiceTypes_FeatureDetail{} } -func (m *UIServiceTypes_FeatureDetail) String() string { return proto.CompactTextString(m) } -func (*UIServiceTypes_FeatureDetail) ProtoMessage() {} -func (*UIServiceTypes_FeatureDetail) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 4} -} -func (m *UIServiceTypes_FeatureDetail) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UIServiceTypes_FeatureDetail.Unmarshal(m, b) -} -func (m *UIServiceTypes_FeatureDetail) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UIServiceTypes_FeatureDetail.Marshal(b, m, deterministic) -} -func (dst *UIServiceTypes_FeatureDetail) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_FeatureDetail.Merge(dst, src) -} -func (m *UIServiceTypes_FeatureDetail) XXX_Size() int { - return xxx_messageInfo_UIServiceTypes_FeatureDetail.Size(m) -} -func (m *UIServiceTypes_FeatureDetail) XXX_DiscardUnknown() { - xxx_messageInfo_UIServiceTypes_FeatureDetail.DiscardUnknown(m) -} - -var xxx_messageInfo_UIServiceTypes_FeatureDetail proto.InternalMessageInfo - -func (m *UIServiceTypes_FeatureDetail) GetSpec() *specs.FeatureSpec { - if m != nil { - return m.Spec - } - return nil -} - -func (m *UIServiceTypes_FeatureDetail) GetBigqueryView() string { - if m != nil { - return m.BigqueryView - } - return "" -} - -func (m *UIServiceTypes_FeatureDetail) GetEnabled() bool { - if m != nil { - return m.Enabled - } - return false -} - -func (m *UIServiceTypes_FeatureDetail) GetJobs() []string { - if m != nil { - return m.Jobs - } - return nil -} - -func (m *UIServiceTypes_FeatureDetail) 
GetLastUpdated() *timestamp.Timestamp { - if m != nil { - return m.LastUpdated - } - return nil -} - -func (m *UIServiceTypes_FeatureDetail) GetCreated() *timestamp.Timestamp { - if m != nil { - return m.Created - } - return nil -} - -type UIServiceTypes_GetFeatureRequest struct { - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UIServiceTypes_GetFeatureRequest) Reset() { *m = UIServiceTypes_GetFeatureRequest{} } -func (m *UIServiceTypes_GetFeatureRequest) String() string { return proto.CompactTextString(m) } -func (*UIServiceTypes_GetFeatureRequest) ProtoMessage() {} -func (*UIServiceTypes_GetFeatureRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 5} -} -func (m *UIServiceTypes_GetFeatureRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UIServiceTypes_GetFeatureRequest.Unmarshal(m, b) -} -func (m *UIServiceTypes_GetFeatureRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UIServiceTypes_GetFeatureRequest.Marshal(b, m, deterministic) -} -func (dst *UIServiceTypes_GetFeatureRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_GetFeatureRequest.Merge(dst, src) -} -func (m *UIServiceTypes_GetFeatureRequest) XXX_Size() int { - return xxx_messageInfo_UIServiceTypes_GetFeatureRequest.Size(m) -} -func (m *UIServiceTypes_GetFeatureRequest) XXX_DiscardUnknown() { - xxx_messageInfo_UIServiceTypes_GetFeatureRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_UIServiceTypes_GetFeatureRequest proto.InternalMessageInfo - -func (m *UIServiceTypes_GetFeatureRequest) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -type UIServiceTypes_GetFeatureResponse struct { - Feature *UIServiceTypes_FeatureDetail `protobuf:"bytes,1,opt,name=feature,proto3" json:"feature,omitempty"` - RawSpec *specs.FeatureSpec `protobuf:"bytes,2,opt,name=rawSpec,proto3" json:"rawSpec,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UIServiceTypes_GetFeatureResponse) Reset() { *m = UIServiceTypes_GetFeatureResponse{} } -func (m *UIServiceTypes_GetFeatureResponse) String() string { return proto.CompactTextString(m) } -func (*UIServiceTypes_GetFeatureResponse) ProtoMessage() {} -func (*UIServiceTypes_GetFeatureResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 6} -} -func (m *UIServiceTypes_GetFeatureResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UIServiceTypes_GetFeatureResponse.Unmarshal(m, b) -} -func (m *UIServiceTypes_GetFeatureResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UIServiceTypes_GetFeatureResponse.Marshal(b, m, deterministic) -} -func (dst *UIServiceTypes_GetFeatureResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_GetFeatureResponse.Merge(dst, src) -} -func (m *UIServiceTypes_GetFeatureResponse) XXX_Size() int { - return xxx_messageInfo_UIServiceTypes_GetFeatureResponse.Size(m) -} -func (m *UIServiceTypes_GetFeatureResponse) XXX_DiscardUnknown() { - xxx_messageInfo_UIServiceTypes_GetFeatureResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_UIServiceTypes_GetFeatureResponse proto.InternalMessageInfo - -func (m *UIServiceTypes_GetFeatureResponse) GetFeature() *UIServiceTypes_FeatureDetail { - if m 
!= nil { - return m.Feature - } - return nil -} - -func (m *UIServiceTypes_GetFeatureResponse) GetRawSpec() *specs.FeatureSpec { - if m != nil { - return m.RawSpec - } - return nil -} - -type UIServiceTypes_ListFeaturesResponse struct { - Features []*UIServiceTypes_FeatureDetail `protobuf:"bytes,1,rep,name=features,proto3" json:"features,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UIServiceTypes_ListFeaturesResponse) Reset() { *m = UIServiceTypes_ListFeaturesResponse{} } -func (m *UIServiceTypes_ListFeaturesResponse) String() string { return proto.CompactTextString(m) } -func (*UIServiceTypes_ListFeaturesResponse) ProtoMessage() {} -func (*UIServiceTypes_ListFeaturesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 7} -} -func (m *UIServiceTypes_ListFeaturesResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UIServiceTypes_ListFeaturesResponse.Unmarshal(m, b) -} -func (m *UIServiceTypes_ListFeaturesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UIServiceTypes_ListFeaturesResponse.Marshal(b, m, deterministic) -} -func (dst *UIServiceTypes_ListFeaturesResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_ListFeaturesResponse.Merge(dst, src) -} -func (m *UIServiceTypes_ListFeaturesResponse) XXX_Size() int { - return xxx_messageInfo_UIServiceTypes_ListFeaturesResponse.Size(m) -} -func (m *UIServiceTypes_ListFeaturesResponse) XXX_DiscardUnknown() { - xxx_messageInfo_UIServiceTypes_ListFeaturesResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_UIServiceTypes_ListFeaturesResponse proto.InternalMessageInfo - -func (m *UIServiceTypes_ListFeaturesResponse) GetFeatures() []*UIServiceTypes_FeatureDetail { - if m != nil { - return m.Features - } - return nil -} - -// Expanded feature group spec -type UIServiceTypes_FeatureGroupDetail struct { - Spec *specs.FeatureGroupSpec `protobuf:"bytes,1,opt,name=spec,proto3" json:"spec,omitempty"` - LastUpdated *timestamp.Timestamp `protobuf:"bytes,2,opt,name=lastUpdated,proto3" json:"lastUpdated,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UIServiceTypes_FeatureGroupDetail) Reset() { *m = UIServiceTypes_FeatureGroupDetail{} } -func (m *UIServiceTypes_FeatureGroupDetail) String() string { return proto.CompactTextString(m) } -func (*UIServiceTypes_FeatureGroupDetail) ProtoMessage() {} -func (*UIServiceTypes_FeatureGroupDetail) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 8} -} -func (m *UIServiceTypes_FeatureGroupDetail) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UIServiceTypes_FeatureGroupDetail.Unmarshal(m, b) -} -func (m *UIServiceTypes_FeatureGroupDetail) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UIServiceTypes_FeatureGroupDetail.Marshal(b, m, deterministic) -} -func (dst *UIServiceTypes_FeatureGroupDetail) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_FeatureGroupDetail.Merge(dst, src) -} -func (m *UIServiceTypes_FeatureGroupDetail) XXX_Size() int { - return xxx_messageInfo_UIServiceTypes_FeatureGroupDetail.Size(m) -} -func (m *UIServiceTypes_FeatureGroupDetail) XXX_DiscardUnknown() { - xxx_messageInfo_UIServiceTypes_FeatureGroupDetail.DiscardUnknown(m) -} - -var xxx_messageInfo_UIServiceTypes_FeatureGroupDetail 
proto.InternalMessageInfo - -func (m *UIServiceTypes_FeatureGroupDetail) GetSpec() *specs.FeatureGroupSpec { - if m != nil { - return m.Spec - } - return nil -} - -func (m *UIServiceTypes_FeatureGroupDetail) GetLastUpdated() *timestamp.Timestamp { - if m != nil { - return m.LastUpdated - } - return nil -} - -type UIServiceTypes_GetFeatureGroupRequest struct { - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UIServiceTypes_GetFeatureGroupRequest) Reset() { *m = UIServiceTypes_GetFeatureGroupRequest{} } -func (m *UIServiceTypes_GetFeatureGroupRequest) String() string { return proto.CompactTextString(m) } -func (*UIServiceTypes_GetFeatureGroupRequest) ProtoMessage() {} -func (*UIServiceTypes_GetFeatureGroupRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 9} -} -func (m *UIServiceTypes_GetFeatureGroupRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UIServiceTypes_GetFeatureGroupRequest.Unmarshal(m, b) -} -func (m *UIServiceTypes_GetFeatureGroupRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UIServiceTypes_GetFeatureGroupRequest.Marshal(b, m, deterministic) -} -func (dst *UIServiceTypes_GetFeatureGroupRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_GetFeatureGroupRequest.Merge(dst, src) -} -func (m *UIServiceTypes_GetFeatureGroupRequest) XXX_Size() int { - return xxx_messageInfo_UIServiceTypes_GetFeatureGroupRequest.Size(m) -} -func (m *UIServiceTypes_GetFeatureGroupRequest) XXX_DiscardUnknown() { - xxx_messageInfo_UIServiceTypes_GetFeatureGroupRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_UIServiceTypes_GetFeatureGroupRequest proto.InternalMessageInfo - -func (m *UIServiceTypes_GetFeatureGroupRequest) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -type UIServiceTypes_GetFeatureGroupResponse struct { - FeatureGroup *UIServiceTypes_FeatureGroupDetail `protobuf:"bytes,1,opt,name=featureGroup,proto3" json:"featureGroup,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UIServiceTypes_GetFeatureGroupResponse) Reset() { - *m = UIServiceTypes_GetFeatureGroupResponse{} -} -func (m *UIServiceTypes_GetFeatureGroupResponse) String() string { return proto.CompactTextString(m) } -func (*UIServiceTypes_GetFeatureGroupResponse) ProtoMessage() {} -func (*UIServiceTypes_GetFeatureGroupResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 10} -} -func (m *UIServiceTypes_GetFeatureGroupResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UIServiceTypes_GetFeatureGroupResponse.Unmarshal(m, b) -} -func (m *UIServiceTypes_GetFeatureGroupResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UIServiceTypes_GetFeatureGroupResponse.Marshal(b, m, deterministic) -} -func (dst *UIServiceTypes_GetFeatureGroupResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_GetFeatureGroupResponse.Merge(dst, src) -} -func (m *UIServiceTypes_GetFeatureGroupResponse) XXX_Size() int { - return xxx_messageInfo_UIServiceTypes_GetFeatureGroupResponse.Size(m) -} -func (m *UIServiceTypes_GetFeatureGroupResponse) XXX_DiscardUnknown() { - xxx_messageInfo_UIServiceTypes_GetFeatureGroupResponse.DiscardUnknown(m) -} - -var 
xxx_messageInfo_UIServiceTypes_GetFeatureGroupResponse proto.InternalMessageInfo - -func (m *UIServiceTypes_GetFeatureGroupResponse) GetFeatureGroup() *UIServiceTypes_FeatureGroupDetail { - if m != nil { - return m.FeatureGroup - } - return nil -} - -type UIServiceTypes_ListFeatureGroupsResponse struct { - FeatureGroups []*UIServiceTypes_FeatureGroupDetail `protobuf:"bytes,1,rep,name=featureGroups,proto3" json:"featureGroups,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UIServiceTypes_ListFeatureGroupsResponse) Reset() { - *m = UIServiceTypes_ListFeatureGroupsResponse{} -} -func (m *UIServiceTypes_ListFeatureGroupsResponse) String() string { return proto.CompactTextString(m) } -func (*UIServiceTypes_ListFeatureGroupsResponse) ProtoMessage() {} -func (*UIServiceTypes_ListFeatureGroupsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 11} -} -func (m *UIServiceTypes_ListFeatureGroupsResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UIServiceTypes_ListFeatureGroupsResponse.Unmarshal(m, b) -} -func (m *UIServiceTypes_ListFeatureGroupsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UIServiceTypes_ListFeatureGroupsResponse.Marshal(b, m, deterministic) -} -func (dst *UIServiceTypes_ListFeatureGroupsResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_ListFeatureGroupsResponse.Merge(dst, src) -} -func (m *UIServiceTypes_ListFeatureGroupsResponse) XXX_Size() int { - return xxx_messageInfo_UIServiceTypes_ListFeatureGroupsResponse.Size(m) -} -func (m *UIServiceTypes_ListFeatureGroupsResponse) XXX_DiscardUnknown() { - xxx_messageInfo_UIServiceTypes_ListFeatureGroupsResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_UIServiceTypes_ListFeatureGroupsResponse proto.InternalMessageInfo - -func (m *UIServiceTypes_ListFeatureGroupsResponse) GetFeatureGroups() []*UIServiceTypes_FeatureGroupDetail { - if m != nil { - return m.FeatureGroups - } - return nil -} - -// Expanded storage spec -type UIServiceTypes_StorageDetail struct { - Spec *specs.StorageSpec `protobuf:"bytes,1,opt,name=spec,proto3" json:"spec,omitempty"` - LastUpdated *timestamp.Timestamp `protobuf:"bytes,2,opt,name=lastUpdated,proto3" json:"lastUpdated,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UIServiceTypes_StorageDetail) Reset() { *m = UIServiceTypes_StorageDetail{} } -func (m *UIServiceTypes_StorageDetail) String() string { return proto.CompactTextString(m) } -func (*UIServiceTypes_StorageDetail) ProtoMessage() {} -func (*UIServiceTypes_StorageDetail) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 12} -} -func (m *UIServiceTypes_StorageDetail) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UIServiceTypes_StorageDetail.Unmarshal(m, b) -} -func (m *UIServiceTypes_StorageDetail) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UIServiceTypes_StorageDetail.Marshal(b, m, deterministic) -} -func (dst *UIServiceTypes_StorageDetail) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_StorageDetail.Merge(dst, src) -} -func (m *UIServiceTypes_StorageDetail) XXX_Size() int { - return xxx_messageInfo_UIServiceTypes_StorageDetail.Size(m) -} -func (m *UIServiceTypes_StorageDetail) XXX_DiscardUnknown() { - 
xxx_messageInfo_UIServiceTypes_StorageDetail.DiscardUnknown(m) -} - -var xxx_messageInfo_UIServiceTypes_StorageDetail proto.InternalMessageInfo - -func (m *UIServiceTypes_StorageDetail) GetSpec() *specs.StorageSpec { - if m != nil { - return m.Spec - } - return nil -} - -func (m *UIServiceTypes_StorageDetail) GetLastUpdated() *timestamp.Timestamp { - if m != nil { - return m.LastUpdated - } - return nil -} - -type UIServiceTypes_GetStorageRequest struct { - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UIServiceTypes_GetStorageRequest) Reset() { *m = UIServiceTypes_GetStorageRequest{} } -func (m *UIServiceTypes_GetStorageRequest) String() string { return proto.CompactTextString(m) } -func (*UIServiceTypes_GetStorageRequest) ProtoMessage() {} -func (*UIServiceTypes_GetStorageRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 13} -} -func (m *UIServiceTypes_GetStorageRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UIServiceTypes_GetStorageRequest.Unmarshal(m, b) -} -func (m *UIServiceTypes_GetStorageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UIServiceTypes_GetStorageRequest.Marshal(b, m, deterministic) -} -func (dst *UIServiceTypes_GetStorageRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_GetStorageRequest.Merge(dst, src) -} -func (m *UIServiceTypes_GetStorageRequest) XXX_Size() int { - return xxx_messageInfo_UIServiceTypes_GetStorageRequest.Size(m) -} -func (m *UIServiceTypes_GetStorageRequest) XXX_DiscardUnknown() { - xxx_messageInfo_UIServiceTypes_GetStorageRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_UIServiceTypes_GetStorageRequest proto.InternalMessageInfo - -func (m *UIServiceTypes_GetStorageRequest) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -type UIServiceTypes_GetStorageResponse struct { - Storage *UIServiceTypes_StorageDetail `protobuf:"bytes,1,opt,name=storage,proto3" json:"storage,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UIServiceTypes_GetStorageResponse) Reset() { *m = UIServiceTypes_GetStorageResponse{} } -func (m *UIServiceTypes_GetStorageResponse) String() string { return proto.CompactTextString(m) } -func (*UIServiceTypes_GetStorageResponse) ProtoMessage() {} -func (*UIServiceTypes_GetStorageResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 14} -} -func (m *UIServiceTypes_GetStorageResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UIServiceTypes_GetStorageResponse.Unmarshal(m, b) -} -func (m *UIServiceTypes_GetStorageResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UIServiceTypes_GetStorageResponse.Marshal(b, m, deterministic) -} -func (dst *UIServiceTypes_GetStorageResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_GetStorageResponse.Merge(dst, src) -} -func (m *UIServiceTypes_GetStorageResponse) XXX_Size() int { - return xxx_messageInfo_UIServiceTypes_GetStorageResponse.Size(m) -} -func (m *UIServiceTypes_GetStorageResponse) XXX_DiscardUnknown() { - xxx_messageInfo_UIServiceTypes_GetStorageResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_UIServiceTypes_GetStorageResponse proto.InternalMessageInfo - -func (m 
*UIServiceTypes_GetStorageResponse) GetStorage() *UIServiceTypes_StorageDetail { - if m != nil { - return m.Storage - } - return nil -} - -type UIServiceTypes_ListStorageResponse struct { - Storage []*UIServiceTypes_StorageDetail `protobuf:"bytes,1,rep,name=storage,proto3" json:"storage,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UIServiceTypes_ListStorageResponse) Reset() { *m = UIServiceTypes_ListStorageResponse{} } -func (m *UIServiceTypes_ListStorageResponse) String() string { return proto.CompactTextString(m) } -func (*UIServiceTypes_ListStorageResponse) ProtoMessage() {} -func (*UIServiceTypes_ListStorageResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 15} -} -func (m *UIServiceTypes_ListStorageResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UIServiceTypes_ListStorageResponse.Unmarshal(m, b) -} -func (m *UIServiceTypes_ListStorageResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UIServiceTypes_ListStorageResponse.Marshal(b, m, deterministic) -} -func (dst *UIServiceTypes_ListStorageResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_ListStorageResponse.Merge(dst, src) -} -func (m *UIServiceTypes_ListStorageResponse) XXX_Size() int { - return xxx_messageInfo_UIServiceTypes_ListStorageResponse.Size(m) -} -func (m *UIServiceTypes_ListStorageResponse) XXX_DiscardUnknown() { - xxx_messageInfo_UIServiceTypes_ListStorageResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_UIServiceTypes_ListStorageResponse proto.InternalMessageInfo - -func (m *UIServiceTypes_ListStorageResponse) GetStorage() []*UIServiceTypes_StorageDetail { - if m != nil { - return m.Storage - } - return nil -} - -func init() { - proto.RegisterType((*UIServiceTypes)(nil), "feast.core.UIServiceTypes") - proto.RegisterType((*UIServiceTypes_EntityDetail)(nil), "feast.core.UIServiceTypes.EntityDetail") - proto.RegisterType((*UIServiceTypes_GetEntityRequest)(nil), "feast.core.UIServiceTypes.GetEntityRequest") - proto.RegisterType((*UIServiceTypes_GetEntityResponse)(nil), "feast.core.UIServiceTypes.GetEntityResponse") - proto.RegisterType((*UIServiceTypes_ListEntitiesResponse)(nil), "feast.core.UIServiceTypes.ListEntitiesResponse") - proto.RegisterType((*UIServiceTypes_FeatureDetail)(nil), "feast.core.UIServiceTypes.FeatureDetail") - proto.RegisterType((*UIServiceTypes_GetFeatureRequest)(nil), "feast.core.UIServiceTypes.GetFeatureRequest") - proto.RegisterType((*UIServiceTypes_GetFeatureResponse)(nil), "feast.core.UIServiceTypes.GetFeatureResponse") - proto.RegisterType((*UIServiceTypes_ListFeaturesResponse)(nil), "feast.core.UIServiceTypes.ListFeaturesResponse") - proto.RegisterType((*UIServiceTypes_FeatureGroupDetail)(nil), "feast.core.UIServiceTypes.FeatureGroupDetail") - proto.RegisterType((*UIServiceTypes_GetFeatureGroupRequest)(nil), "feast.core.UIServiceTypes.GetFeatureGroupRequest") - proto.RegisterType((*UIServiceTypes_GetFeatureGroupResponse)(nil), "feast.core.UIServiceTypes.GetFeatureGroupResponse") - proto.RegisterType((*UIServiceTypes_ListFeatureGroupsResponse)(nil), "feast.core.UIServiceTypes.ListFeatureGroupsResponse") - proto.RegisterType((*UIServiceTypes_StorageDetail)(nil), "feast.core.UIServiceTypes.StorageDetail") - proto.RegisterType((*UIServiceTypes_GetStorageRequest)(nil), "feast.core.UIServiceTypes.GetStorageRequest") - 
proto.RegisterType((*UIServiceTypes_GetStorageResponse)(nil), "feast.core.UIServiceTypes.GetStorageResponse") - proto.RegisterType((*UIServiceTypes_ListStorageResponse)(nil), "feast.core.UIServiceTypes.ListStorageResponse") -} - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion4 - -// UIServiceClient is the client API for UIService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type UIServiceClient interface { - // - // Get entity specified in request. - // This process returns a single of entity specs. - GetEntity(ctx context.Context, in *UIServiceTypes_GetEntityRequest, opts ...grpc.CallOption) (*UIServiceTypes_GetEntityResponse, error) - // - // Get all entities. - // This process returns a list of entity specs. - ListEntities(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*UIServiceTypes_ListEntitiesResponse, error) - // - // Get feature specified in request. - GetFeature(ctx context.Context, in *UIServiceTypes_GetFeatureRequest, opts ...grpc.CallOption) (*UIServiceTypes_GetFeatureResponse, error) - // - // Get all features. - // This process returns a list of feature specs. - ListFeatures(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*UIServiceTypes_ListFeaturesResponse, error) - // - // Get feature group specified in request. - GetFeatureGroup(ctx context.Context, in *UIServiceTypes_GetFeatureGroupRequest, opts ...grpc.CallOption) (*UIServiceTypes_GetFeatureGroupResponse, error) - // - // Get all feature groups. - // This process returns a list of feature group specs. - ListFeatureGroups(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*UIServiceTypes_ListFeatureGroupsResponse, error) - // - // Get storage spec specified in request. - // - GetStorage(ctx context.Context, in *UIServiceTypes_GetStorageRequest, opts ...grpc.CallOption) (*UIServiceTypes_GetStorageResponse, error) - // - // Get all storage specs. - // This process returns a list of storage specs. - ListStorage(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*UIServiceTypes_ListStorageResponse, error) -} - -type uIServiceClient struct { - cc *grpc.ClientConn -} - -func NewUIServiceClient(cc *grpc.ClientConn) UIServiceClient { - return &uIServiceClient{cc} -} - -func (c *uIServiceClient) GetEntity(ctx context.Context, in *UIServiceTypes_GetEntityRequest, opts ...grpc.CallOption) (*UIServiceTypes_GetEntityResponse, error) { - out := new(UIServiceTypes_GetEntityResponse) - err := c.cc.Invoke(ctx, "/feast.core.UIService/GetEntity", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *uIServiceClient) ListEntities(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*UIServiceTypes_ListEntitiesResponse, error) { - out := new(UIServiceTypes_ListEntitiesResponse) - err := c.cc.Invoke(ctx, "/feast.core.UIService/ListEntities", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *uIServiceClient) GetFeature(ctx context.Context, in *UIServiceTypes_GetFeatureRequest, opts ...grpc.CallOption) (*UIServiceTypes_GetFeatureResponse, error) { - out := new(UIServiceTypes_GetFeatureResponse) - err := c.cc.Invoke(ctx, "/feast.core.UIService/GetFeature", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *uIServiceClient) ListFeatures(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*UIServiceTypes_ListFeaturesResponse, error) { - out := new(UIServiceTypes_ListFeaturesResponse) - err := c.cc.Invoke(ctx, "/feast.core.UIService/ListFeatures", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *uIServiceClient) GetFeatureGroup(ctx context.Context, in *UIServiceTypes_GetFeatureGroupRequest, opts ...grpc.CallOption) (*UIServiceTypes_GetFeatureGroupResponse, error) { - out := new(UIServiceTypes_GetFeatureGroupResponse) - err := c.cc.Invoke(ctx, "/feast.core.UIService/GetFeatureGroup", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *uIServiceClient) ListFeatureGroups(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*UIServiceTypes_ListFeatureGroupsResponse, error) { - out := new(UIServiceTypes_ListFeatureGroupsResponse) - err := c.cc.Invoke(ctx, "/feast.core.UIService/ListFeatureGroups", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *uIServiceClient) GetStorage(ctx context.Context, in *UIServiceTypes_GetStorageRequest, opts ...grpc.CallOption) (*UIServiceTypes_GetStorageResponse, error) { - out := new(UIServiceTypes_GetStorageResponse) - err := c.cc.Invoke(ctx, "/feast.core.UIService/GetStorage", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *uIServiceClient) ListStorage(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*UIServiceTypes_ListStorageResponse, error) { - out := new(UIServiceTypes_ListStorageResponse) - err := c.cc.Invoke(ctx, "/feast.core.UIService/ListStorage", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// UIServiceServer is the server API for UIService service. -type UIServiceServer interface { - // - // Get entity specified in request. - // This process returns a single of entity specs. - GetEntity(context.Context, *UIServiceTypes_GetEntityRequest) (*UIServiceTypes_GetEntityResponse, error) - // - // Get all entities. - // This process returns a list of entity specs. - ListEntities(context.Context, *empty.Empty) (*UIServiceTypes_ListEntitiesResponse, error) - // - // Get feature specified in request. - GetFeature(context.Context, *UIServiceTypes_GetFeatureRequest) (*UIServiceTypes_GetFeatureResponse, error) - // - // Get all features. - // This process returns a list of feature specs. - ListFeatures(context.Context, *empty.Empty) (*UIServiceTypes_ListFeaturesResponse, error) - // - // Get feature group specified in request. - GetFeatureGroup(context.Context, *UIServiceTypes_GetFeatureGroupRequest) (*UIServiceTypes_GetFeatureGroupResponse, error) - // - // Get all feature groups. - // This process returns a list of feature group specs. - ListFeatureGroups(context.Context, *empty.Empty) (*UIServiceTypes_ListFeatureGroupsResponse, error) - // - // Get storage spec specified in request. 
- // - GetStorage(context.Context, *UIServiceTypes_GetStorageRequest) (*UIServiceTypes_GetStorageResponse, error) - // - // Get all storage specs. - // This process returns a list of storage specs. - ListStorage(context.Context, *empty.Empty) (*UIServiceTypes_ListStorageResponse, error) -} - -func RegisterUIServiceServer(s *grpc.Server, srv UIServiceServer) { - s.RegisterService(&_UIService_serviceDesc, srv) -} - -func _UIService_GetEntity_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(UIServiceTypes_GetEntityRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(UIServiceServer).GetEntity(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.UIService/GetEntity", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(UIServiceServer).GetEntity(ctx, req.(*UIServiceTypes_GetEntityRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _UIService_ListEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(empty.Empty) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(UIServiceServer).ListEntities(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.UIService/ListEntities", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(UIServiceServer).ListEntities(ctx, req.(*empty.Empty)) - } - return interceptor(ctx, in, info, handler) -} - -func _UIService_GetFeature_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(UIServiceTypes_GetFeatureRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(UIServiceServer).GetFeature(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.UIService/GetFeature", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(UIServiceServer).GetFeature(ctx, req.(*UIServiceTypes_GetFeatureRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _UIService_ListFeatures_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(empty.Empty) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(UIServiceServer).ListFeatures(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.UIService/ListFeatures", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(UIServiceServer).ListFeatures(ctx, req.(*empty.Empty)) - } - return interceptor(ctx, in, info, handler) -} - -func _UIService_GetFeatureGroup_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(UIServiceTypes_GetFeatureGroupRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(UIServiceServer).GetFeatureGroup(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.UIService/GetFeatureGroup", - } - handler := func(ctx context.Context, req 
interface{}) (interface{}, error) { - return srv.(UIServiceServer).GetFeatureGroup(ctx, req.(*UIServiceTypes_GetFeatureGroupRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _UIService_ListFeatureGroups_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(empty.Empty) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(UIServiceServer).ListFeatureGroups(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.UIService/ListFeatureGroups", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(UIServiceServer).ListFeatureGroups(ctx, req.(*empty.Empty)) - } - return interceptor(ctx, in, info, handler) -} - -func _UIService_GetStorage_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(UIServiceTypes_GetStorageRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(UIServiceServer).GetStorage(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.UIService/GetStorage", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(UIServiceServer).GetStorage(ctx, req.(*UIServiceTypes_GetStorageRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _UIService_ListStorage_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(empty.Empty) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(UIServiceServer).ListStorage(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.UIService/ListStorage", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(UIServiceServer).ListStorage(ctx, req.(*empty.Empty)) - } - return interceptor(ctx, in, info, handler) -} - -var _UIService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "feast.core.UIService", - HandlerType: (*UIServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "GetEntity", - Handler: _UIService_GetEntity_Handler, - }, - { - MethodName: "ListEntities", - Handler: _UIService_ListEntities_Handler, - }, - { - MethodName: "GetFeature", - Handler: _UIService_GetFeature_Handler, - }, - { - MethodName: "ListFeatures", - Handler: _UIService_ListFeatures_Handler, - }, - { - MethodName: "GetFeatureGroup", - Handler: _UIService_GetFeatureGroup_Handler, - }, - { - MethodName: "ListFeatureGroups", - Handler: _UIService_ListFeatureGroups_Handler, - }, - { - MethodName: "GetStorage", - Handler: _UIService_GetStorage_Handler, - }, - { - MethodName: "ListStorage", - Handler: _UIService_ListStorage_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "feast/core/UIService.proto", -} - -func init() { - proto.RegisterFile("feast/core/UIService.proto", fileDescriptor_UIService_04866529701c634c) -} - -var fileDescriptor_UIService_04866529701c634c = []byte{ - // 784 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x4d, 0x6f, 0xd3, 0x40, - 0x10, 0xcd, 0x47, 0x49, 0x9a, 0x69, 0x5a, 0xe8, 0x82, 0xda, 0xb0, 0x50, 0x51, 0x99, 0x03, 0x91, - 0xda, 0xda, 0x6a, 0x28, 0x37, 0x24, 0xa4, 0xd2, 0x52, 0x21, 0x71, 0x00, 0xa7, 
0x05, 0x0a, 0x5c, - 0x9c, 0x64, 0xe2, 0xba, 0x4d, 0x62, 0xd7, 0xde, 0x50, 0x05, 0x8e, 0x88, 0x1b, 0xfc, 0x26, 0xfe, - 0x16, 0x47, 0x94, 0xdd, 0xb5, 0xbd, 0x4e, 0x9a, 0xc4, 0xa9, 0xb8, 0x25, 0x3b, 0x33, 0x6f, 0x67, - 0xde, 0x9b, 0x19, 0x2f, 0xd0, 0x36, 0x5a, 0x01, 0x33, 0x9a, 0xae, 0x8f, 0xc6, 0xc9, 0xeb, 0x3a, - 0xfa, 0x5f, 0x9d, 0x26, 0xea, 0x9e, 0xef, 0x32, 0x97, 0x00, 0xb7, 0xe9, 0x43, 0x1b, 0x7d, 0x28, - 0xfc, 0x02, 0x0f, 0x9b, 0x81, 0x71, 0xd8, 0x63, 0x0e, 0x1b, 0xd4, 0x3d, 0x6c, 0x0a, 0x4f, 0xba, - 0xa1, 0x5a, 0x5f, 0xa1, 0xc5, 0xfa, 0x3e, 0x2a, 0x66, 0xed, 0x1a, 0xf3, 0x91, 0xef, 0xf6, 0xbd, - 0x49, 0x10, 0x75, 0xe6, 0xfa, 0x96, 0xad, 0x42, 0x3c, 0xb0, 0x5d, 0xd7, 0xee, 0xa0, 0xc1, 0xff, - 0x35, 0xfa, 0x6d, 0x03, 0xbb, 0x1e, 0x1b, 0x48, 0xe3, 0xa3, 0x51, 0x23, 0x73, 0xba, 0x18, 0x30, - 0xab, 0xeb, 0x09, 0x07, 0xed, 0x6f, 0x19, 0x56, 0xa2, 0xea, 0x8e, 0x07, 0x1e, 0x06, 0xf4, 0x77, - 0x16, 0xca, 0xa2, 0x8e, 0x03, 0x64, 0x96, 0xd3, 0x21, 0x5b, 0xb0, 0x30, 0xbc, 0xbc, 0x92, 0xdd, - 0xcc, 0x56, 0x97, 0x6a, 0xeb, 0xba, 0x28, 0x9e, 0xe7, 0xa3, 0xc7, 0x05, 0x9b, 0xdc, 0x89, 0x10, - 0x58, 0x38, 0x77, 0x1b, 0x41, 0x25, 0xb7, 0x99, 0xaf, 0x96, 0x4c, 0xfe, 0x9b, 0x3c, 0x87, 0xa5, - 0x8e, 0x15, 0xb0, 0x13, 0xaf, 0x65, 0x31, 0x6c, 0x55, 0xf2, 0x1c, 0x87, 0xea, 0x22, 0x37, 0x3d, - 0xcc, 0x4d, 0x3f, 0x0e, 0x73, 0x33, 0x55, 0x77, 0xaa, 0xc1, 0x9d, 0x23, 0x64, 0xe2, 0x22, 0x13, - 0x2f, 0xfb, 0x18, 0x30, 0xb2, 0x02, 0x39, 0xa7, 0xc5, 0x13, 0x2a, 0x99, 0x39, 0xa7, 0x45, 0x8f, - 0x61, 0x55, 0xf1, 0x09, 0x3c, 0xb7, 0x17, 0x20, 0x79, 0x01, 0x05, 0xe4, 0x27, 0x32, 0xf3, 0x27, - 0x7a, 0x2c, 0x9b, 0x9e, 0x2c, 0x5a, 0x57, 0x0b, 0x36, 0x65, 0x18, 0xfd, 0x0c, 0xf7, 0xde, 0x38, - 0x81, 0x80, 0x75, 0x30, 0x88, 0x80, 0x5f, 0xc2, 0x22, 0xca, 0xb3, 0x4a, 0x76, 0x33, 0x3f, 0x0f, - 0x74, 0x14, 0x48, 0x7f, 0xe4, 0x60, 0x59, 0x2a, 0x2e, 0x79, 0xde, 0x4e, 0xf0, 0x5c, 0x49, 0xf0, - 0xac, 0xb4, 0x8e, 0x24, 0x5a, 0x83, 0x72, 0xc3, 0xb1, 0x2f, 0xfb, 0xe8, 0x0f, 0xde, 0x3b, 0x78, - 0x55, 0xc9, 0x71, 0x32, 0x12, 0x67, 0xa4, 0x02, 0x45, 0xec, 0x59, 0x8d, 0x8e, 0x24, 0x7d, 0xd1, - 0x0c, 0xff, 0x46, 0x32, 0x2d, 0x4c, 0x96, 0xe9, 0xd6, 0x5c, 0x32, 0x91, 0x3d, 0x28, 0x36, 0x7d, - 0xe4, 0x91, 0x85, 0x99, 0x91, 0xa1, 0x2b, 0x7d, 0xcc, 0x85, 0x93, 0xd5, 0x4d, 0x52, 0xf7, 0x57, - 0x16, 0x88, 0xea, 0x25, 0x65, 0xd8, 0x87, 0x62, 0x5b, 0x1c, 0x49, 0xca, 0xaa, 0x53, 0x54, 0x48, - 0x50, 0x6d, 0x86, 0x81, 0xa4, 0x06, 0x45, 0xdf, 0xba, 0x1a, 0xd2, 0xca, 0x09, 0x9c, 0x46, 0x7b, - 0xe8, 0x48, 0xbf, 0x88, 0xb6, 0x90, 0xb6, 0xb8, 0x2d, 0x0e, 0x60, 0x51, 0xc2, 0x86, 0x6d, 0x91, - 0x3e, 0xa1, 0x28, 0x92, 0xfe, 0xcc, 0x02, 0x51, 0x37, 0x81, 0x6c, 0x8e, 0xdd, 0x44, 0x73, 0x6c, - 0x5c, 0x97, 0x65, 0xb4, 0x38, 0x64, 0x87, 0x8c, 0xe8, 0x99, 0x9b, 0x6f, 0xec, 0xaa, 0xb0, 0x16, - 0x73, 0xce, 0xa1, 0x27, 0xc9, 0xd3, 0x81, 0xf5, 0x31, 0x4f, 0x49, 0xc9, 0x3b, 0x28, 0xb7, 0x95, - 0x73, 0x99, 0xfd, 0xce, 0x6c, 0x5a, 0x94, 0xd2, 0xcd, 0x04, 0x04, 0xf5, 0xe0, 0xbe, 0xc2, 0x3e, - 0x3f, 0x8b, 0x25, 0xa8, 0xc3, 0xb2, 0xea, 0x1c, 0xea, 0x30, 0xe7, 0x85, 0x49, 0x0c, 0xfa, 0x1d, - 0x96, 0xe5, 0xda, 0x4d, 0x31, 0xa8, 0xca, 0x82, 0xfe, 0x2f, 0x32, 0x88, 0x01, 0x91, 0xa8, 0x93, - 0x14, 0xf8, 0xc8, 0xe7, 0x23, 0x72, 0x8a, 0xe7, 0x23, 0x10, 0x47, 0x29, 0xe6, 0x23, 0x51, 0xa1, - 0x19, 0x06, 0xd2, 0x53, 0xb8, 0x3b, 0x64, 0x7b, 0x2a, 0x74, 0xfe, 0x46, 0xd0, 0xb5, 0x3f, 0x05, - 0x28, 0x45, 0x9e, 0xe4, 0x0c, 0x4a, 0xd1, 0x06, 0x27, 0x5b, 0x53, 0xd0, 0x46, 0xbf, 0x05, 0x74, - 0x3b, 0x9d, 0xb3, 0xc8, 0x5c, 0xcb, 0x90, 0x53, 0x28, 0xab, 0x5b, 0x9d, 0xac, 0x8d, 0x49, 0x71, - 0x38, 
0xfc, 0x82, 0x52, 0x63, 0x0a, 0xee, 0x75, 0x9f, 0x05, 0x2d, 0x43, 0x2e, 0x00, 0xe2, 0x49, - 0x20, 0x33, 0x12, 0x4b, 0x2e, 0x3d, 0xba, 0x93, 0xd2, 0x7b, 0xb4, 0x8e, 0x70, 0x0d, 0xdd, 0xb8, - 0x8e, 0xd1, 0x3d, 0xa6, 0x65, 0xc8, 0x37, 0xb8, 0x3d, 0x32, 0xd1, 0x64, 0x37, 0x55, 0x7a, 0xea, - 0x9e, 0xa0, 0xb5, 0x79, 0x42, 0xa2, 0xbb, 0x9b, 0xb0, 0x3a, 0x36, 0xdf, 0x13, 0x6b, 0xdb, 0x4b, - 0x57, 0x5b, 0x72, 0x4b, 0x44, 0x42, 0xc9, 0xc6, 0x9c, 0x25, 0x54, 0x72, 0xf8, 0x66, 0x09, 0x35, - 0x32, 0x2a, 0x5a, 0x86, 0x7c, 0x80, 0x25, 0x65, 0x86, 0x26, 0xd6, 0xa2, 0xcf, 0xa8, 0x65, 0x0c, - 0x78, 0xbf, 0x0e, 0xca, 0x43, 0x74, 0x3f, 0x7e, 0xc7, 0xbd, 0x1d, 0x02, 0x7f, 0x7a, 0x66, 0x3b, - 0xec, 0xac, 0xdf, 0xd0, 0x9b, 0x6e, 0xd7, 0xb0, 0xdd, 0x73, 0xbc, 0x30, 0xc4, 0x43, 0x92, 0x5f, - 0x1b, 0x18, 0x36, 0xf6, 0xd0, 0x1f, 0xae, 0x18, 0xc3, 0x76, 0x8d, 0xf8, 0xad, 0xdb, 0x28, 0x70, - 0xfb, 0xd3, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x90, 0x76, 0x0d, 0x62, 0x00, 0x0b, 0x00, 0x00, -} diff --git a/protos/generated/go/feast/serving/Serving.pb.go b/protos/generated/go/feast/serving/Serving.pb.go deleted file mode 100644 index 2afb9ca284b..00000000000 --- a/protos/generated/go/feast/serving/Serving.pb.go +++ /dev/null @@ -1,339 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: feast/serving/Serving.proto - -package serving // import "github.com/gojek/feast/protos/generated/go/feast/serving" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import types "github.com/gojek/feast/protos/generated/go/feast/types" -import timestamp "github.com/golang/protobuf/ptypes/timestamp" - -import ( - context "golang.org/x/net/context" - grpc "google.golang.org/grpc" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type QueryFeaturesRequest struct { - // e.g. "driver", "customer", "city". - EntityName string `protobuf:"bytes,1,opt,name=entityName,proto3" json:"entityName,omitempty"` - // List of entity ID. - EntityId []string `protobuf:"bytes,2,rep,name=entityId,proto3" json:"entityId,omitempty"` - // List of feature ID. - // feature ID is in the form of [entity_name].[feature_name] - // e.g: "driver.total_accepted_booking" - // all requested feature ID shall have same entity name. 
- FeatureId []string `protobuf:"bytes,3,rep,name=featureId,proto3" json:"featureId,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *QueryFeaturesRequest) Reset() { *m = QueryFeaturesRequest{} } -func (m *QueryFeaturesRequest) String() string { return proto.CompactTextString(m) } -func (*QueryFeaturesRequest) ProtoMessage() {} -func (*QueryFeaturesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_Serving_f91320f9a3f0c4cf, []int{0} -} -func (m *QueryFeaturesRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_QueryFeaturesRequest.Unmarshal(m, b) -} -func (m *QueryFeaturesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_QueryFeaturesRequest.Marshal(b, m, deterministic) -} -func (dst *QueryFeaturesRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_QueryFeaturesRequest.Merge(dst, src) -} -func (m *QueryFeaturesRequest) XXX_Size() int { - return xxx_messageInfo_QueryFeaturesRequest.Size(m) -} -func (m *QueryFeaturesRequest) XXX_DiscardUnknown() { - xxx_messageInfo_QueryFeaturesRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_QueryFeaturesRequest proto.InternalMessageInfo - -func (m *QueryFeaturesRequest) GetEntityName() string { - if m != nil { - return m.EntityName - } - return "" -} - -func (m *QueryFeaturesRequest) GetEntityId() []string { - if m != nil { - return m.EntityId - } - return nil -} - -func (m *QueryFeaturesRequest) GetFeatureId() []string { - if m != nil { - return m.FeatureId - } - return nil -} - -type QueryFeaturesResponse struct { - // Entity name of the response - EntityName string `protobuf:"bytes,1,opt,name=entityName,proto3" json:"entityName,omitempty"` - // map of entity ID and its entity's properties. - Entities map[string]*Entity `protobuf:"bytes,2,rep,name=entities,proto3" json:"entities,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *QueryFeaturesResponse) Reset() { *m = QueryFeaturesResponse{} } -func (m *QueryFeaturesResponse) String() string { return proto.CompactTextString(m) } -func (*QueryFeaturesResponse) ProtoMessage() {} -func (*QueryFeaturesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_Serving_f91320f9a3f0c4cf, []int{1} -} -func (m *QueryFeaturesResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_QueryFeaturesResponse.Unmarshal(m, b) -} -func (m *QueryFeaturesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_QueryFeaturesResponse.Marshal(b, m, deterministic) -} -func (dst *QueryFeaturesResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_QueryFeaturesResponse.Merge(dst, src) -} -func (m *QueryFeaturesResponse) XXX_Size() int { - return xxx_messageInfo_QueryFeaturesResponse.Size(m) -} -func (m *QueryFeaturesResponse) XXX_DiscardUnknown() { - xxx_messageInfo_QueryFeaturesResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_QueryFeaturesResponse proto.InternalMessageInfo - -func (m *QueryFeaturesResponse) GetEntityName() string { - if m != nil { - return m.EntityName - } - return "" -} - -func (m *QueryFeaturesResponse) GetEntities() map[string]*Entity { - if m != nil { - return m.Entities - } - return nil -} - -type Entity struct { - // map of feature ID and its feature value. 
- Features map[string]*FeatureValue `protobuf:"bytes,1,rep,name=features,proto3" json:"features,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *Entity) Reset() { *m = Entity{} } -func (m *Entity) String() string { return proto.CompactTextString(m) } -func (*Entity) ProtoMessage() {} -func (*Entity) Descriptor() ([]byte, []int) { - return fileDescriptor_Serving_f91320f9a3f0c4cf, []int{2} -} -func (m *Entity) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Entity.Unmarshal(m, b) -} -func (m *Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Entity.Marshal(b, m, deterministic) -} -func (dst *Entity) XXX_Merge(src proto.Message) { - xxx_messageInfo_Entity.Merge(dst, src) -} -func (m *Entity) XXX_Size() int { - return xxx_messageInfo_Entity.Size(m) -} -func (m *Entity) XXX_DiscardUnknown() { - xxx_messageInfo_Entity.DiscardUnknown(m) -} - -var xxx_messageInfo_Entity proto.InternalMessageInfo - -func (m *Entity) GetFeatures() map[string]*FeatureValue { - if m != nil { - return m.Features - } - return nil -} - -type FeatureValue struct { - // value of feature - Value *types.Value `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` - // timestamp of the feature - Timestamp *timestamp.Timestamp `protobuf:"bytes,2,opt,name=timestamp,proto3" json:"timestamp,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *FeatureValue) Reset() { *m = FeatureValue{} } -func (m *FeatureValue) String() string { return proto.CompactTextString(m) } -func (*FeatureValue) ProtoMessage() {} -func (*FeatureValue) Descriptor() ([]byte, []int) { - return fileDescriptor_Serving_f91320f9a3f0c4cf, []int{3} -} -func (m *FeatureValue) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_FeatureValue.Unmarshal(m, b) -} -func (m *FeatureValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_FeatureValue.Marshal(b, m, deterministic) -} -func (dst *FeatureValue) XXX_Merge(src proto.Message) { - xxx_messageInfo_FeatureValue.Merge(dst, src) -} -func (m *FeatureValue) XXX_Size() int { - return xxx_messageInfo_FeatureValue.Size(m) -} -func (m *FeatureValue) XXX_DiscardUnknown() { - xxx_messageInfo_FeatureValue.DiscardUnknown(m) -} - -var xxx_messageInfo_FeatureValue proto.InternalMessageInfo - -func (m *FeatureValue) GetValue() *types.Value { - if m != nil { - return m.Value - } - return nil -} - -func (m *FeatureValue) GetTimestamp() *timestamp.Timestamp { - if m != nil { - return m.Timestamp - } - return nil -} - -func init() { - proto.RegisterType((*QueryFeaturesRequest)(nil), "feast.serving.QueryFeaturesRequest") - proto.RegisterType((*QueryFeaturesResponse)(nil), "feast.serving.QueryFeaturesResponse") - proto.RegisterMapType((map[string]*Entity)(nil), "feast.serving.QueryFeaturesResponse.EntitiesEntry") - proto.RegisterType((*Entity)(nil), "feast.serving.Entity") - proto.RegisterMapType((map[string]*FeatureValue)(nil), "feast.serving.Entity.FeaturesEntry") - proto.RegisterType((*FeatureValue)(nil), "feast.serving.FeatureValue") -} - -// Reference imports to suppress errors if they are not otherwise used. 
-var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion4 - -// ServingAPIClient is the client API for ServingAPI service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type ServingAPIClient interface { - // Query features from Feast serving storage - QueryFeatures(ctx context.Context, in *QueryFeaturesRequest, opts ...grpc.CallOption) (*QueryFeaturesResponse, error) -} - -type servingAPIClient struct { - cc *grpc.ClientConn -} - -func NewServingAPIClient(cc *grpc.ClientConn) ServingAPIClient { - return &servingAPIClient{cc} -} - -func (c *servingAPIClient) QueryFeatures(ctx context.Context, in *QueryFeaturesRequest, opts ...grpc.CallOption) (*QueryFeaturesResponse, error) { - out := new(QueryFeaturesResponse) - err := c.cc.Invoke(ctx, "/feast.serving.ServingAPI/QueryFeatures", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// ServingAPIServer is the server API for ServingAPI service. -type ServingAPIServer interface { - // Query features from Feast serving storage - QueryFeatures(context.Context, *QueryFeaturesRequest) (*QueryFeaturesResponse, error) -} - -func RegisterServingAPIServer(s *grpc.Server, srv ServingAPIServer) { - s.RegisterService(&_ServingAPI_serviceDesc, srv) -} - -func _ServingAPI_QueryFeatures_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(QueryFeaturesRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ServingAPIServer).QueryFeatures(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.serving.ServingAPI/QueryFeatures", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ServingAPIServer).QueryFeatures(ctx, req.(*QueryFeaturesRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _ServingAPI_serviceDesc = grpc.ServiceDesc{ - ServiceName: "feast.serving.ServingAPI", - HandlerType: (*ServingAPIServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "QueryFeatures", - Handler: _ServingAPI_QueryFeatures_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "feast/serving/Serving.proto", -} - -func init() { - proto.RegisterFile("feast/serving/Serving.proto", fileDescriptor_Serving_f91320f9a3f0c4cf) -} - -var fileDescriptor_Serving_f91320f9a3f0c4cf = []byte{ - // 429 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x53, 0xd1, 0x8a, 0xd3, 0x40, - 0x14, 0x75, 0x5a, 0x5c, 0x36, 0x77, 0x0d, 0xca, 0xe0, 0x62, 0xc8, 0x8a, 0x96, 0xac, 0x0f, 0x01, - 0x61, 0x06, 0xe3, 0x4b, 0xf1, 0x45, 0x5c, 0x58, 0xa1, 0x2f, 0x4b, 0x8d, 0x22, 0x52, 0x7c, 0x49, - 0xed, 0x6d, 0x4c, 0xdb, 0x64, 0x62, 0x66, 0x52, 0xc8, 0xf7, 0xf8, 0x59, 0xfe, 0x8c, 0x74, 0x66, - 0xd2, 0x26, 0xa5, 0xe8, 0x3e, 0x25, 0x33, 0xe7, 0xde, 0x73, 0xce, 0xcc, 0x3d, 0x03, 0x57, 0x4b, - 0x4c, 0xa4, 0xe2, 0x12, 0xab, 0x6d, 0x56, 0xa4, 0xfc, 0xb3, 0xf9, 0xb2, 0xb2, 0x12, 0x4a, 0x50, - 0x57, 0x83, 0xcc, 0x82, 0xfe, 0xcb, 0x54, 0x88, 0x74, 0x83, 0x5c, 0x83, 0xf3, 0x7a, 0xc9, 0x55, - 0x96, 0xa3, 0x54, 0x49, 0x5e, 0x9a, 0x7a, 0xff, 0x99, 0x21, 0x53, 0x4d, 0x89, 0x92, 0x7f, 0x4d, - 0x36, 0x35, 0x1a, 0x20, 0x28, 0xe1, 
0xe9, 0xa7, 0x1a, 0xab, 0xe6, 0x23, 0x26, 0xaa, 0xae, 0x50, - 0xc6, 0xf8, 0xab, 0x46, 0xa9, 0xe8, 0x0b, 0x00, 0x2c, 0x54, 0xa6, 0x9a, 0xbb, 0x24, 0x47, 0x8f, - 0x8c, 0x48, 0xe8, 0xc4, 0x9d, 0x1d, 0xea, 0xc3, 0xb9, 0x59, 0x4d, 0x16, 0xde, 0x60, 0x34, 0x0c, - 0x9d, 0x78, 0xbf, 0xa6, 0xcf, 0xc1, 0x59, 0x1a, 0xba, 0xc9, 0xc2, 0x1b, 0x6a, 0xf0, 0xb0, 0x11, - 0xfc, 0x21, 0x70, 0x79, 0x24, 0x29, 0x4b, 0x51, 0x48, 0xfc, 0xaf, 0xe6, 0x9d, 0xd5, 0xcc, 0x50, - 0x6a, 0xcd, 0x8b, 0x28, 0x62, 0xbd, 0x7b, 0x60, 0x27, 0x79, 0xd9, 0xad, 0x6d, 0xba, 0x2d, 0x54, - 0xd5, 0xc4, 0x7b, 0x0e, 0x3f, 0x06, 0xb7, 0x07, 0xd1, 0x27, 0x30, 0x5c, 0x63, 0x63, 0x95, 0x77, - 0xbf, 0xf4, 0x35, 0x3c, 0xdc, 0xee, 0x6e, 0xcb, 0x1b, 0x8c, 0x48, 0x78, 0x11, 0x5d, 0x1e, 0xe9, - 0xe9, 0xf6, 0x26, 0x36, 0x35, 0xef, 0x06, 0x63, 0x12, 0xfc, 0x26, 0x70, 0x66, 0x76, 0xe9, 0x7b, - 0x38, 0xb7, 0xa7, 0x96, 0x1e, 0xd1, 0x76, 0xaf, 0x4f, 0xb6, 0xb3, 0xd6, 0xb0, 0xf5, 0xd7, 0x36, - 0xf9, 0xdf, 0xc0, 0xed, 0x41, 0x27, 0xfc, 0xbd, 0xe9, 0xfb, 0xbb, 0x3a, 0x12, 0xb0, 0xed, 0x7a, - 0xe0, 0x5d, 0x97, 0x15, 0x3c, 0xea, 0x42, 0x34, 0x6c, 0x69, 0x88, 0xa6, 0xa1, 0x96, 0x46, 0xc7, - 0x85, 0x75, 0xbb, 0xe9, 0x18, 0x9c, 0x7d, 0xb6, 0xac, 0xa8, 0xcf, 0x4c, 0xfa, 0x58, 0x9b, 0x3e, - 0xf6, 0xa5, 0xad, 0x88, 0x0f, 0xc5, 0xd1, 0x0a, 0xc0, 0x66, 0xf8, 0xc3, 0x74, 0x42, 0xbf, 0x83, - 0xdb, 0x1b, 0x16, 0xbd, 0xfe, 0xf7, 0x28, 0x75, 0x2a, 0xfd, 0x57, 0xf7, 0x99, 0x77, 0xf0, 0xe0, - 0x66, 0x06, 0xfd, 0x07, 0x72, 0xf3, 0xf8, 0x20, 0x3d, 0xdd, 0xb9, 0x9c, 0x8d, 0xd3, 0x4c, 0xfd, - 0xac, 0xe7, 0xec, 0x87, 0xc8, 0x79, 0x2a, 0x56, 0xb8, 0xe6, 0xe6, 0x85, 0xe8, 0x33, 0x48, 0x9e, - 0x62, 0x81, 0x55, 0xa2, 0x70, 0xc1, 0x53, 0xc1, 0x7b, 0x0f, 0x71, 0x7e, 0xa6, 0x4b, 0xde, 0xfe, - 0x0d, 0x00, 0x00, 0xff, 0xff, 0x77, 0xb5, 0x0e, 0xd1, 0xa0, 0x03, 0x00, 0x00, -} diff --git a/protos/generated/go/feast/specs/EntitySpec.pb.go b/protos/generated/go/feast/specs/EntitySpec.pb.go deleted file mode 100644 index 0f4374c679e..00000000000 --- a/protos/generated/go/feast/specs/EntitySpec.pb.go +++ /dev/null @@ -1,97 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: feast/specs/EntitySpec.proto - -package specs // import "github.com/gojek/feast/protos/generated/go/feast/specs" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type EntitySpec struct { - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` - Tags []string `protobuf:"bytes,3,rep,name=tags,proto3" json:"tags,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *EntitySpec) Reset() { *m = EntitySpec{} } -func (m *EntitySpec) String() string { return proto.CompactTextString(m) } -func (*EntitySpec) ProtoMessage() {} -func (*EntitySpec) Descriptor() ([]byte, []int) { - return fileDescriptor_EntitySpec_b8950ded39b854cb, []int{0} -} -func (m *EntitySpec) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_EntitySpec.Unmarshal(m, b) -} -func (m *EntitySpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_EntitySpec.Marshal(b, m, deterministic) -} -func (dst *EntitySpec) XXX_Merge(src proto.Message) { - xxx_messageInfo_EntitySpec.Merge(dst, src) -} -func (m *EntitySpec) XXX_Size() int { - return xxx_messageInfo_EntitySpec.Size(m) -} -func (m *EntitySpec) XXX_DiscardUnknown() { - xxx_messageInfo_EntitySpec.DiscardUnknown(m) -} - -var xxx_messageInfo_EntitySpec proto.InternalMessageInfo - -func (m *EntitySpec) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *EntitySpec) GetDescription() string { - if m != nil { - return m.Description - } - return "" -} - -func (m *EntitySpec) GetTags() []string { - if m != nil { - return m.Tags - } - return nil -} - -func init() { - proto.RegisterType((*EntitySpec)(nil), "feast.specs.EntitySpec") -} - -func init() { - proto.RegisterFile("feast/specs/EntitySpec.proto", fileDescriptor_EntitySpec_b8950ded39b854cb) -} - -var fileDescriptor_EntitySpec_b8950ded39b854cb = []byte{ - // 177 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x49, 0x4b, 0x4d, 0x2c, - 0x2e, 0xd1, 0x2f, 0x2e, 0x48, 0x4d, 0x2e, 0xd6, 0x77, 0xcd, 0x2b, 0xc9, 0x2c, 0xa9, 0x0c, 0x2e, - 0x48, 0x4d, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x06, 0xcb, 0xea, 0x81, 0x65, 0x95, - 0xc2, 0xb8, 0xb8, 0x10, 0x0a, 0x84, 0x84, 0xb8, 0x58, 0xf2, 0x12, 0x73, 0x53, 0x25, 0x18, 0x15, - 0x18, 0x35, 0x38, 0x83, 0xc0, 0x6c, 0x21, 0x05, 0x2e, 0xee, 0x94, 0xd4, 0xe2, 0xe4, 0xa2, 0xcc, - 0x82, 0x92, 0xcc, 0xfc, 0x3c, 0x09, 0x26, 0xb0, 0x14, 0xb2, 0x10, 0x48, 0x57, 0x49, 0x62, 0x7a, - 0xb1, 0x04, 0xb3, 0x02, 0x33, 0x48, 0x17, 0x88, 0xed, 0x14, 0xc6, 0x85, 0x6c, 0x8d, 0x13, 0x3f, - 0xc2, 0x92, 0x00, 0x90, 0x23, 0xa2, 0xcc, 0xd2, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, 0x92, 0xf3, - 0x73, 0xf5, 0xd3, 0xf3, 0xb3, 0x52, 0xb3, 0xf5, 0x21, 0x6e, 0x06, 0x3b, 0xb1, 0x58, 0x3f, 0x3d, - 0x35, 0x2f, 0xb5, 0x28, 0xb1, 0x24, 0x35, 0x45, 0x3f, 0x3d, 0x5f, 0x1f, 0xc9, 0x37, 0x49, 0x6c, - 0x60, 0x05, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0xe9, 0xf2, 0x1c, 0x48, 0xe3, 0x00, 0x00, - 0x00, -} diff --git a/protos/generated/go/feast/specs/FeatureGroupSpec.pb.go b/protos/generated/go/feast/specs/FeatureGroupSpec.pb.go deleted file mode 100644 index f6bf2fcc433..00000000000 --- a/protos/generated/go/feast/specs/FeatureGroupSpec.pb.go +++ /dev/null @@ -1,102 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. 
-// source: feast/specs/FeatureGroupSpec.proto - -package specs // import "github.com/gojek/feast/protos/generated/go/feast/specs" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type FeatureGroupSpec struct { - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - Tags []string `protobuf:"bytes,2,rep,name=tags,proto3" json:"tags,omitempty"` - Options map[string]string `protobuf:"bytes,3,rep,name=options,proto3" json:"options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *FeatureGroupSpec) Reset() { *m = FeatureGroupSpec{} } -func (m *FeatureGroupSpec) String() string { return proto.CompactTextString(m) } -func (*FeatureGroupSpec) ProtoMessage() {} -func (*FeatureGroupSpec) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureGroupSpec_863105520104cb04, []int{0} -} -func (m *FeatureGroupSpec) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_FeatureGroupSpec.Unmarshal(m, b) -} -func (m *FeatureGroupSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_FeatureGroupSpec.Marshal(b, m, deterministic) -} -func (dst *FeatureGroupSpec) XXX_Merge(src proto.Message) { - xxx_messageInfo_FeatureGroupSpec.Merge(dst, src) -} -func (m *FeatureGroupSpec) XXX_Size() int { - return xxx_messageInfo_FeatureGroupSpec.Size(m) -} -func (m *FeatureGroupSpec) XXX_DiscardUnknown() { - xxx_messageInfo_FeatureGroupSpec.DiscardUnknown(m) -} - -var xxx_messageInfo_FeatureGroupSpec proto.InternalMessageInfo - -func (m *FeatureGroupSpec) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -func (m *FeatureGroupSpec) GetTags() []string { - if m != nil { - return m.Tags - } - return nil -} - -func (m *FeatureGroupSpec) GetOptions() map[string]string { - if m != nil { - return m.Options - } - return nil -} - -func init() { - proto.RegisterType((*FeatureGroupSpec)(nil), "feast.specs.FeatureGroupSpec") - proto.RegisterMapType((map[string]string)(nil), "feast.specs.FeatureGroupSpec.OptionsEntry") -} - -func init() { - proto.RegisterFile("feast/specs/FeatureGroupSpec.proto", fileDescriptor_FeatureGroupSpec_863105520104cb04) -} - -var fileDescriptor_FeatureGroupSpec_863105520104cb04 = []byte{ - // 245 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x4a, 0x4b, 0x4d, 0x2c, - 0x2e, 0xd1, 0x2f, 0x2e, 0x48, 0x4d, 0x2e, 0xd6, 0x77, 0x4b, 0x4d, 0x2c, 0x29, 0x2d, 0x4a, 0x75, - 0x2f, 0xca, 0x2f, 0x2d, 0x08, 0x2e, 0x48, 0x4d, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, - 0x06, 0xab, 0xd1, 0x03, 0xab, 0x91, 0x92, 0xc5, 0xa2, 0x01, 0xa1, 0x56, 0x69, 0x07, 0x23, 0x97, - 0x00, 0xba, 0x31, 0x42, 0x7c, 0x5c, 0x4c, 0x99, 0x29, 0x12, 0x8c, 0x0a, 0x8c, 0x1a, 0x9c, 0x41, - 0x4c, 0x99, 0x29, 0x42, 0x42, 0x5c, 0x2c, 0x25, 0x89, 0xe9, 0xc5, 0x12, 0x4c, 0x0a, 0xcc, 0x1a, - 0x9c, 0x41, 0x60, 0xb6, 0x90, 0x0b, 0x17, 0x7b, 
0x7e, 0x41, 0x49, 0x66, 0x7e, 0x5e, 0xb1, 0x04, - 0xb3, 0x02, 0xb3, 0x06, 0xb7, 0x91, 0x96, 0x1e, 0x92, 0xb5, 0x7a, 0x18, 0x4e, 0xf3, 0x87, 0x28, - 0x76, 0xcd, 0x2b, 0x29, 0xaa, 0x0c, 0x82, 0x69, 0x95, 0xb2, 0xe2, 0xe2, 0x41, 0x96, 0x10, 0x12, - 0xe0, 0x62, 0xce, 0x4e, 0xad, 0x84, 0x5a, 0x0d, 0x62, 0x0a, 0x89, 0x70, 0xb1, 0x96, 0x25, 0xe6, - 0x94, 0xa6, 0x4a, 0x30, 0x81, 0xc5, 0x20, 0x1c, 0x2b, 0x26, 0x0b, 0x46, 0xa7, 0x18, 0x2e, 0x64, - 0x8f, 0x3a, 0x89, 0xa2, 0x5b, 0x19, 0x00, 0xf2, 0x60, 0x94, 0x59, 0x7a, 0x66, 0x49, 0x46, 0x69, - 0x92, 0x5e, 0x72, 0x7e, 0xae, 0x7e, 0x7a, 0x7e, 0x56, 0x6a, 0xb6, 0x3e, 0x24, 0x40, 0xc0, 0xde, - 0x2f, 0xd6, 0x4f, 0x4f, 0xcd, 0x4b, 0x2d, 0x4a, 0x2c, 0x49, 0x4d, 0xd1, 0x4f, 0xcf, 0xd7, 0x47, - 0x0a, 0xaa, 0x24, 0x36, 0xb0, 0x02, 0x63, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0xc3, 0xb3, 0x0b, - 0xcf, 0x71, 0x01, 0x00, 0x00, -} diff --git a/protos/generated/go/feast/specs/FeatureSpec.pb.go b/protos/generated/go/feast/specs/FeatureSpec.pb.go deleted file mode 100644 index 2432d8213f2..00000000000 --- a/protos/generated/go/feast/specs/FeatureSpec.pb.go +++ /dev/null @@ -1,166 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: feast/specs/FeatureSpec.proto - -package specs // import "github.com/gojek/feast/protos/generated/go/feast/specs" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import types "github.com/gojek/feast/protos/generated/go/feast/types" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type FeatureSpec struct { - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` - Owner string `protobuf:"bytes,3,opt,name=owner,proto3" json:"owner,omitempty"` - Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"` - Uri string `protobuf:"bytes,5,opt,name=uri,proto3" json:"uri,omitempty"` - ValueType types.ValueType_Enum `protobuf:"varint,7,opt,name=valueType,proto3,enum=feast.types.ValueType_Enum" json:"valueType,omitempty"` - Entity string `protobuf:"bytes,8,opt,name=entity,proto3" json:"entity,omitempty"` - Group string `protobuf:"bytes,9,opt,name=group,proto3" json:"group,omitempty"` - Tags []string `protobuf:"bytes,10,rep,name=tags,proto3" json:"tags,omitempty"` - Options map[string]string `protobuf:"bytes,11,rep,name=options,proto3" json:"options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *FeatureSpec) Reset() { *m = FeatureSpec{} } -func (m *FeatureSpec) String() string { return proto.CompactTextString(m) } -func (*FeatureSpec) ProtoMessage() {} -func (*FeatureSpec) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureSpec_182bc164237cde02, []int{0} -} -func (m *FeatureSpec) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_FeatureSpec.Unmarshal(m, b) -} -func (m *FeatureSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_FeatureSpec.Marshal(b, m, deterministic) -} -func (dst *FeatureSpec) XXX_Merge(src proto.Message) { - xxx_messageInfo_FeatureSpec.Merge(dst, src) -} -func (m *FeatureSpec) XXX_Size() int { - return xxx_messageInfo_FeatureSpec.Size(m) -} -func (m *FeatureSpec) XXX_DiscardUnknown() { - xxx_messageInfo_FeatureSpec.DiscardUnknown(m) -} - -var xxx_messageInfo_FeatureSpec proto.InternalMessageInfo - -func (m *FeatureSpec) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -func (m *FeatureSpec) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *FeatureSpec) GetOwner() string { - if m != nil { - return m.Owner - } - return "" -} - -func (m *FeatureSpec) GetDescription() string { - if m != nil { - return m.Description - } - return "" -} - -func (m *FeatureSpec) GetUri() string { - if m != nil { - return m.Uri - } - return "" -} - -func (m *FeatureSpec) GetValueType() types.ValueType_Enum { - if m != nil { - return m.ValueType - } - return types.ValueType_UNKNOWN -} - -func (m *FeatureSpec) GetEntity() string { - if m != nil { - return m.Entity - } - return "" -} - -func (m *FeatureSpec) GetGroup() string { - if m != nil { - return m.Group - } - return "" -} - -func (m *FeatureSpec) GetTags() []string { - if m != nil { - return m.Tags - } - return nil -} - -func (m *FeatureSpec) GetOptions() map[string]string { - if m != nil { - return m.Options - } - return nil -} - -func init() { - proto.RegisterType((*FeatureSpec)(nil), "feast.specs.FeatureSpec") - proto.RegisterMapType((map[string]string)(nil), "feast.specs.FeatureSpec.OptionsEntry") -} - -func init() { - proto.RegisterFile("feast/specs/FeatureSpec.proto", fileDescriptor_FeatureSpec_182bc164237cde02) -} - -var fileDescriptor_FeatureSpec_182bc164237cde02 = []byte{ - // 357 bytes of a gzipped FileDescriptorProto - 0x1f, 
0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x51, 0x4d, 0x6b, 0xe3, 0x30, - 0x10, 0xc5, 0x76, 0x3e, 0x36, 0xf2, 0x12, 0x82, 0x58, 0x76, 0x85, 0x77, 0x17, 0x4c, 0xa1, 0x90, - 0x93, 0x04, 0x29, 0x94, 0x36, 0x97, 0x42, 0x20, 0xbd, 0xb6, 0x24, 0x25, 0x85, 0xde, 0x1c, 0x7b, - 0xea, 0xba, 0x69, 0x2c, 0x23, 0xc9, 0x2d, 0xfe, 0xe1, 0xbd, 0x17, 0x8d, 0x12, 0xa2, 0xde, 0x66, - 0xe6, 0x3d, 0x8d, 0xde, 0xbc, 0x47, 0xfe, 0x3f, 0x43, 0xa6, 0x8d, 0xd0, 0x0d, 0xe4, 0x5a, 0xdc, - 0x42, 0x66, 0x5a, 0x05, 0xeb, 0x06, 0x72, 0xde, 0x28, 0x69, 0x24, 0x8d, 0x11, 0xe6, 0x08, 0x27, - 0xff, 0x7c, 0xee, 0xb2, 0x36, 0x95, 0xe9, 0x4e, 0xd4, 0xe4, 0xdb, 0xa6, 0xb5, 0x91, 0x2a, 0x2b, - 0xbd, 0x4d, 0xc9, 0x1f, 0x07, 0x9b, 0xae, 0x01, 0x2d, 0x36, 0xd9, 0x5b, 0x0b, 0x0e, 0x38, 0xfb, - 0x0c, 0x49, 0xec, 0x7d, 0x4c, 0xc7, 0x24, 0xac, 0x0a, 0x16, 0xa4, 0xc1, 0x74, 0xb4, 0x0a, 0xab, - 0x82, 0x52, 0xd2, 0xab, 0xb3, 0x3d, 0xb0, 0x10, 0x27, 0x58, 0xd3, 0x5f, 0xa4, 0x2f, 0x3f, 0x6a, - 0x50, 0x2c, 0xc2, 0xa1, 0x6b, 0x68, 0x4a, 0xe2, 0x02, 0x74, 0xae, 0xaa, 0xc6, 0x54, 0xb2, 0x66, - 0x3d, 0xc4, 0xfc, 0x11, 0x9d, 0x90, 0xa8, 0x55, 0x15, 0xeb, 0x23, 0x62, 0x4b, 0x7a, 0x4d, 0x46, - 0xef, 0x56, 0xcc, 0x43, 0xd7, 0x00, 0x1b, 0xa6, 0xc1, 0x74, 0x3c, 0xfb, 0xcb, 0xdd, 0xd1, 0x28, - 0x95, 0x6f, 0x8e, 0x28, 0x5f, 0xd6, 0xed, 0x7e, 0x75, 0x62, 0xd3, 0xdf, 0x64, 0x00, 0x68, 0x02, - 0xfb, 0x81, 0xfb, 0x0e, 0x9d, 0x15, 0x57, 0x2a, 0xd9, 0x36, 0x6c, 0xe4, 0xc4, 0x61, 0x63, 0xcf, - 0x30, 0x59, 0xa9, 0x19, 0x49, 0x23, 0x7b, 0x86, 0xad, 0xe9, 0x0d, 0x19, 0x4a, 0x14, 0xa6, 0x59, - 0x9c, 0x46, 0xd3, 0x78, 0x76, 0xce, 0x3d, 0xbf, 0xb9, 0x1f, 0xc7, 0x9d, 0xe3, 0x2d, 0x6b, 0xa3, - 0xba, 0xd5, 0xf1, 0x55, 0x32, 0x27, 0x3f, 0x7d, 0xc0, 0xde, 0xb7, 0x83, 0xee, 0x60, 0x9e, 0x2d, - 0xad, 0x18, 0x54, 0x7c, 0xb0, 0xcf, 0x35, 0xf3, 0xf0, 0x2a, 0x58, 0x3c, 0x12, 0x3f, 0xdc, 0xc5, - 0xc4, 0xfb, 0xed, 0xde, 0x06, 0xf3, 0x74, 0x59, 0x56, 0xe6, 0xa5, 0xdd, 0xf2, 0x5c, 0xee, 0x45, - 0x29, 0x5f, 0x61, 0x27, 0x5c, 0x84, 0x18, 0x9b, 0x16, 0x25, 0xd4, 0xa0, 0x32, 0x03, 0x85, 0x28, - 0xa5, 0xf0, 0xb2, 0xdf, 0x0e, 0x90, 0x70, 0xf1, 0x15, 0x00, 0x00, 0xff, 0xff, 0xbb, 0xd8, 0x93, - 0x97, 0x5b, 0x02, 0x00, 0x00, -} diff --git a/protos/generated/go/feast/specs/ImportJobSpecs.pb.go b/protos/generated/go/feast/specs/ImportJobSpecs.pb.go deleted file mode 100644 index c75520403d6..00000000000 --- a/protos/generated/go/feast/specs/ImportJobSpecs.pb.go +++ /dev/null @@ -1,136 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: feast/specs/ImportJobSpecs.proto - -package specs // import "github.com/gojek/feast/protos/generated/go/feast/specs" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type ImportJobSpecs struct { - JobId string `protobuf:"bytes,1,opt,name=jobId,proto3" json:"jobId,omitempty"` - ImportSpec *ImportSpec `protobuf:"bytes,2,opt,name=importSpec,proto3" json:"importSpec,omitempty"` - EntitySpecs []*EntitySpec `protobuf:"bytes,3,rep,name=entitySpecs,proto3" json:"entitySpecs,omitempty"` - FeatureSpecs []*FeatureSpec `protobuf:"bytes,4,rep,name=featureSpecs,proto3" json:"featureSpecs,omitempty"` - ServingStorageSpec *StorageSpec `protobuf:"bytes,5,opt,name=servingStorageSpec,proto3" json:"servingStorageSpec,omitempty"` - WarehouseStorageSpec *StorageSpec `protobuf:"bytes,6,opt,name=warehouseStorageSpec,proto3" json:"warehouseStorageSpec,omitempty"` - ErrorsStorageSpec *StorageSpec `protobuf:"bytes,7,opt,name=errorsStorageSpec,proto3" json:"errorsStorageSpec,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *ImportJobSpecs) Reset() { *m = ImportJobSpecs{} } -func (m *ImportJobSpecs) String() string { return proto.CompactTextString(m) } -func (*ImportJobSpecs) ProtoMessage() {} -func (*ImportJobSpecs) Descriptor() ([]byte, []int) { - return fileDescriptor_ImportJobSpecs_876ba7e8763b3d51, []int{0} -} -func (m *ImportJobSpecs) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_ImportJobSpecs.Unmarshal(m, b) -} -func (m *ImportJobSpecs) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_ImportJobSpecs.Marshal(b, m, deterministic) -} -func (dst *ImportJobSpecs) XXX_Merge(src proto.Message) { - xxx_messageInfo_ImportJobSpecs.Merge(dst, src) -} -func (m *ImportJobSpecs) XXX_Size() int { - return xxx_messageInfo_ImportJobSpecs.Size(m) -} -func (m *ImportJobSpecs) XXX_DiscardUnknown() { - xxx_messageInfo_ImportJobSpecs.DiscardUnknown(m) -} - -var xxx_messageInfo_ImportJobSpecs proto.InternalMessageInfo - -func (m *ImportJobSpecs) GetJobId() string { - if m != nil { - return m.JobId - } - return "" -} - -func (m *ImportJobSpecs) GetImportSpec() *ImportSpec { - if m != nil { - return m.ImportSpec - } - return nil -} - -func (m *ImportJobSpecs) GetEntitySpecs() []*EntitySpec { - if m != nil { - return m.EntitySpecs - } - return nil -} - -func (m *ImportJobSpecs) GetFeatureSpecs() []*FeatureSpec { - if m != nil { - return m.FeatureSpecs - } - return nil -} - -func (m *ImportJobSpecs) GetServingStorageSpec() *StorageSpec { - if m != nil { - return m.ServingStorageSpec - } - return nil -} - -func (m *ImportJobSpecs) GetWarehouseStorageSpec() *StorageSpec { - if m != nil { - return m.WarehouseStorageSpec - } - return nil -} - -func (m *ImportJobSpecs) GetErrorsStorageSpec() *StorageSpec { - if m != nil { - return m.ErrorsStorageSpec - } - return nil -} - -func init() { - proto.RegisterType((*ImportJobSpecs)(nil), "feast.specs.ImportJobSpecs") -} - -func init() { - proto.RegisterFile("feast/specs/ImportJobSpecs.proto", fileDescriptor_ImportJobSpecs_876ba7e8763b3d51) -} - -var fileDescriptor_ImportJobSpecs_876ba7e8763b3d51 = []byte{ - // 304 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0x51, 0x4b, 0xfb, 0x30, - 0x14, 0xc5, 0xd9, 0xbf, 0xff, 0x4d, 0x4c, 0x45, 0x30, 0x0e, 0x0c, 0xa2, 0x50, 0x7c, 0xda, 0x53, - 0x03, 0x0a, 0x8a, 0xe0, 0xd3, 0xc0, 0xe1, 0xc4, 0x07, 0xe9, 0xde, 0xf6, 0xd6, 0x6e, 0x77, 0x59, - 0x27, 0xeb, 0x2d, 0x49, 0xaa, 0xf8, 0xcd, 0xfc, 0x78, 0xd2, 0x14, 0xd6, 0x1b, 0x3a, 0xd9, 0x63, - 0x38, 
0xbf, 0x73, 0x38, 0x87, 0x5c, 0x16, 0xad, 0x20, 0x35, 0x56, 0x9a, 0x12, 0x16, 0x46, 0x4e, - 0xb7, 0x25, 0x6a, 0xfb, 0x8a, 0xd9, 0xac, 0x7e, 0xc6, 0xa5, 0x46, 0x8b, 0x3c, 0x74, 0x44, 0xec, - 0x88, 0xcb, 0xab, 0x2e, 0x5e, 0xb3, 0x0d, 0xea, 0xab, 0xcf, 0x85, 0xcd, 0xed, 0x37, 0x51, 0xaf, - 0xa9, 0x3a, 0x81, 0xd4, 0x56, 0x1a, 0xfe, 0x92, 0x67, 0x16, 0x75, 0xaa, 0x88, 0x7c, 0xf3, 0x13, - 0xb0, 0x53, 0xbf, 0x1f, 0x1f, 0xb2, 0xfe, 0x06, 0xb3, 0xe9, 0x52, 0xf4, 0xa2, 0xde, 0xe8, 0x38, - 0x69, 0x1e, 0xfc, 0x81, 0xb1, 0x7c, 0x57, 0x4c, 0xfc, 0x8b, 0x7a, 0xa3, 0xf0, 0xf6, 0x22, 0x26, - 0x23, 0xe2, 0xb6, 0x77, 0x42, 0x50, 0xfe, 0xc8, 0x42, 0xd8, 0x75, 0x36, 0x22, 0x88, 0x82, 0x8e, - 0xb3, 0xdd, 0x94, 0x50, 0x96, 0x3f, 0xb1, 0x93, 0x55, 0x3b, 0xc8, 0x88, 0xff, 0xce, 0x2b, 0x3c, - 0x2f, 0x59, 0x9c, 0x78, 0x34, 0x7f, 0x61, 0xdc, 0x80, 0xfe, 0xcc, 0x0b, 0x45, 0x66, 0x8b, 0xbe, - 0x6b, 0xee, 0x67, 0x10, 0x3d, 0xd9, 0xe3, 0xe1, 0x6f, 0x6c, 0xf8, 0x95, 0x6a, 0x58, 0x63, 0x65, - 0x80, 0x66, 0x0d, 0x0e, 0x64, 0xed, 0x75, 0xf1, 0x09, 0x3b, 0x03, 0xad, 0x51, 0x1b, 0x1a, 0x75, - 0x74, 0x20, 0xaa, 0x6b, 0x19, 0xcf, 0x19, 0xbd, 0xa1, 0xf1, 0xb9, 0xff, 0x8d, 0xef, 0xf5, 0xf7, - 0xce, 0xef, 0x55, 0x6e, 0xd7, 0x55, 0x16, 0x2f, 0x70, 0x2b, 0x15, 0x6e, 0xe0, 0x43, 0x36, 0xe7, - 0xe0, 0x3e, 0xdf, 0x48, 0x05, 0x05, 0xe8, 0xd4, 0xc2, 0x52, 0x2a, 0x94, 0xe4, 0x50, 0xb2, 0x81, - 0x03, 0xee, 0x7e, 0x03, 0x00, 0x00, 0xff, 0xff, 0x48, 0xca, 0x5a, 0x01, 0xc8, 0x02, 0x00, 0x00, -} diff --git a/protos/generated/go/feast/specs/ImportSpec.pb.go b/protos/generated/go/feast/specs/ImportSpec.pb.go deleted file mode 100644 index d16f9fa07bd..00000000000 --- a/protos/generated/go/feast/specs/ImportSpec.pb.go +++ /dev/null @@ -1,339 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: feast/specs/ImportSpec.proto - -package specs // import "github.com/gojek/feast/protos/generated/go/feast/specs" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import timestamp "github.com/golang/protobuf/ptypes/timestamp" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type ImportSpec struct { - Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` - SourceOptions map[string]string `protobuf:"bytes,2,rep,name=sourceOptions,proto3" json:"sourceOptions,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - JobOptions map[string]string `protobuf:"bytes,5,rep,name=jobOptions,proto3" json:"jobOptions,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - Entities []string `protobuf:"bytes,3,rep,name=entities,proto3" json:"entities,omitempty"` - Schema *Schema `protobuf:"bytes,4,opt,name=schema,proto3" json:"schema,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *ImportSpec) Reset() { *m = ImportSpec{} } -func (m *ImportSpec) String() string { return proto.CompactTextString(m) } -func (*ImportSpec) ProtoMessage() {} -func (*ImportSpec) Descriptor() ([]byte, []int) { - return fileDescriptor_ImportSpec_673bc4f248a91137, []int{0} -} -func (m *ImportSpec) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_ImportSpec.Unmarshal(m, b) -} -func (m *ImportSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_ImportSpec.Marshal(b, m, deterministic) -} -func (dst *ImportSpec) XXX_Merge(src proto.Message) { - xxx_messageInfo_ImportSpec.Merge(dst, src) -} -func (m *ImportSpec) XXX_Size() int { - return xxx_messageInfo_ImportSpec.Size(m) -} -func (m *ImportSpec) XXX_DiscardUnknown() { - xxx_messageInfo_ImportSpec.DiscardUnknown(m) -} - -var xxx_messageInfo_ImportSpec proto.InternalMessageInfo - -func (m *ImportSpec) GetType() string { - if m != nil { - return m.Type - } - return "" -} - -func (m *ImportSpec) GetSourceOptions() map[string]string { - if m != nil { - return m.SourceOptions - } - return nil -} - -func (m *ImportSpec) GetJobOptions() map[string]string { - if m != nil { - return m.JobOptions - } - return nil -} - -func (m *ImportSpec) GetEntities() []string { - if m != nil { - return m.Entities - } - return nil -} - -func (m *ImportSpec) GetSchema() *Schema { - if m != nil { - return m.Schema - } - return nil -} - -type Schema struct { - Fields []*Field `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty"` - // the event timestamp to set per row. 
- // - // Types that are valid to be assigned to Timestamp: - // *Schema_TimestampColumn - // *Schema_TimestampValue - Timestamp isSchema_Timestamp `protobuf_oneof:"timestamp"` - EntityIdColumn string `protobuf:"bytes,7,opt,name=entityIdColumn,proto3" json:"entityIdColumn,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *Schema) Reset() { *m = Schema{} } -func (m *Schema) String() string { return proto.CompactTextString(m) } -func (*Schema) ProtoMessage() {} -func (*Schema) Descriptor() ([]byte, []int) { - return fileDescriptor_ImportSpec_673bc4f248a91137, []int{1} -} -func (m *Schema) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Schema.Unmarshal(m, b) -} -func (m *Schema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Schema.Marshal(b, m, deterministic) -} -func (dst *Schema) XXX_Merge(src proto.Message) { - xxx_messageInfo_Schema.Merge(dst, src) -} -func (m *Schema) XXX_Size() int { - return xxx_messageInfo_Schema.Size(m) -} -func (m *Schema) XXX_DiscardUnknown() { - xxx_messageInfo_Schema.DiscardUnknown(m) -} - -var xxx_messageInfo_Schema proto.InternalMessageInfo - -func (m *Schema) GetFields() []*Field { - if m != nil { - return m.Fields - } - return nil -} - -type isSchema_Timestamp interface { - isSchema_Timestamp() -} - -type Schema_TimestampColumn struct { - TimestampColumn string `protobuf:"bytes,5,opt,name=timestampColumn,proto3,oneof"` -} - -type Schema_TimestampValue struct { - TimestampValue *timestamp.Timestamp `protobuf:"bytes,6,opt,name=timestampValue,proto3,oneof"` -} - -func (*Schema_TimestampColumn) isSchema_Timestamp() {} - -func (*Schema_TimestampValue) isSchema_Timestamp() {} - -func (m *Schema) GetTimestamp() isSchema_Timestamp { - if m != nil { - return m.Timestamp - } - return nil -} - -func (m *Schema) GetTimestampColumn() string { - if x, ok := m.GetTimestamp().(*Schema_TimestampColumn); ok { - return x.TimestampColumn - } - return "" -} - -func (m *Schema) GetTimestampValue() *timestamp.Timestamp { - if x, ok := m.GetTimestamp().(*Schema_TimestampValue); ok { - return x.TimestampValue - } - return nil -} - -func (m *Schema) GetEntityIdColumn() string { - if m != nil { - return m.EntityIdColumn - } - return "" -} - -// XXX_OneofFuncs is for the internal use of the proto package. 
-func (*Schema) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { - return _Schema_OneofMarshaler, _Schema_OneofUnmarshaler, _Schema_OneofSizer, []interface{}{ - (*Schema_TimestampColumn)(nil), - (*Schema_TimestampValue)(nil), - } -} - -func _Schema_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { - m := msg.(*Schema) - // timestamp - switch x := m.Timestamp.(type) { - case *Schema_TimestampColumn: - b.EncodeVarint(5<<3 | proto.WireBytes) - b.EncodeStringBytes(x.TimestampColumn) - case *Schema_TimestampValue: - b.EncodeVarint(6<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.TimestampValue); err != nil { - return err - } - case nil: - default: - return fmt.Errorf("Schema.Timestamp has unexpected type %T", x) - } - return nil -} - -func _Schema_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { - m := msg.(*Schema) - switch tag { - case 5: // timestamp.timestampColumn - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeStringBytes() - m.Timestamp = &Schema_TimestampColumn{x} - return true, err - case 6: // timestamp.timestampValue - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(timestamp.Timestamp) - err := b.DecodeMessage(msg) - m.Timestamp = &Schema_TimestampValue{msg} - return true, err - default: - return false, nil - } -} - -func _Schema_OneofSizer(msg proto.Message) (n int) { - m := msg.(*Schema) - // timestamp - switch x := m.Timestamp.(type) { - case *Schema_TimestampColumn: - n += 1 // tag and wire - n += proto.SizeVarint(uint64(len(x.TimestampColumn))) - n += len(x.TimestampColumn) - case *Schema_TimestampValue: - s := proto.Size(x.TimestampValue) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case nil: - default: - panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) - } - return n -} - -type Field struct { - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - FeatureId string `protobuf:"bytes,2,opt,name=featureId,proto3" json:"featureId,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *Field) Reset() { *m = Field{} } -func (m *Field) String() string { return proto.CompactTextString(m) } -func (*Field) ProtoMessage() {} -func (*Field) Descriptor() ([]byte, []int) { - return fileDescriptor_ImportSpec_673bc4f248a91137, []int{2} -} -func (m *Field) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Field.Unmarshal(m, b) -} -func (m *Field) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Field.Marshal(b, m, deterministic) -} -func (dst *Field) XXX_Merge(src proto.Message) { - xxx_messageInfo_Field.Merge(dst, src) -} -func (m *Field) XXX_Size() int { - return xxx_messageInfo_Field.Size(m) -} -func (m *Field) XXX_DiscardUnknown() { - xxx_messageInfo_Field.DiscardUnknown(m) -} - -var xxx_messageInfo_Field proto.InternalMessageInfo - -func (m *Field) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *Field) GetFeatureId() string { - if m != nil { - return m.FeatureId - } - return "" -} - -func init() { - proto.RegisterType((*ImportSpec)(nil), "feast.specs.ImportSpec") - proto.RegisterMapType((map[string]string)(nil), "feast.specs.ImportSpec.JobOptionsEntry") - 
proto.RegisterMapType((map[string]string)(nil), "feast.specs.ImportSpec.SourceOptionsEntry") - proto.RegisterType((*Schema)(nil), "feast.specs.Schema") - proto.RegisterType((*Field)(nil), "feast.specs.Field") -} - -func init() { - proto.RegisterFile("feast/specs/ImportSpec.proto", fileDescriptor_ImportSpec_673bc4f248a91137) -} - -var fileDescriptor_ImportSpec_673bc4f248a91137 = []byte{ - // 440 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0x5d, 0x8f, 0x93, 0x40, - 0x14, 0x5d, 0xca, 0x16, 0xed, 0x6d, 0xdc, 0x9a, 0xab, 0x0f, 0x84, 0x6c, 0x62, 0xd3, 0x07, 0x6d, - 0x6a, 0x32, 0x93, 0xac, 0x89, 0x51, 0x13, 0x13, 0x53, 0xbf, 0xb6, 0xbe, 0xb8, 0xa1, 0x66, 0x1f, - 0x7c, 0xa3, 0x70, 0x61, 0xd9, 0x05, 0x86, 0x30, 0x83, 0x09, 0x3f, 0xc0, 0x5f, 0xe9, 0x9f, 0x31, - 0x0c, 0x94, 0x52, 0x8c, 0x0f, 0xfb, 0x36, 0xdc, 0x73, 0xee, 0xb9, 0x67, 0x2e, 0x67, 0xe0, 0x3c, - 0x24, 0x4f, 0x2a, 0x2e, 0x73, 0xf2, 0x25, 0xdf, 0xa4, 0xb9, 0x28, 0xd4, 0x36, 0x27, 0x9f, 0xe5, - 0x85, 0x50, 0x02, 0xa7, 0x1a, 0x65, 0x1a, 0x75, 0x9e, 0x45, 0x42, 0x44, 0x09, 0x71, 0x0d, 0xed, - 0xca, 0x90, 0xab, 0x38, 0x25, 0xa9, 0xbc, 0x34, 0x6f, 0xd8, 0x8b, 0xdf, 0x26, 0xc0, 0x41, 0x02, - 0x11, 0x4e, 0x55, 0x95, 0x93, 0x6d, 0xcc, 0x8d, 0xe5, 0xc4, 0xd5, 0x67, 0xbc, 0x82, 0x47, 0x52, - 0x94, 0x85, 0x4f, 0xdf, 0x73, 0x15, 0x8b, 0x4c, 0xda, 0xa3, 0xb9, 0xb9, 0x9c, 0x5e, 0xac, 0x58, - 0x6f, 0x10, 0xeb, 0xd9, 0xd8, 0xf6, 0xc9, 0x9f, 0x33, 0x55, 0x54, 0xee, 0xb1, 0x00, 0x7e, 0x05, - 0xb8, 0x15, 0xbb, 0xbd, 0xdc, 0x58, 0xcb, 0xbd, 0xf8, 0x9f, 0xdc, 0xb7, 0x8e, 0xd9, 0x68, 0xf5, - 0x5a, 0xd1, 0x81, 0x87, 0x94, 0xa9, 0x58, 0xc5, 0x24, 0x6d, 0x73, 0x6e, 0x2e, 0x27, 0x6e, 0xf7, - 0x8d, 0x2f, 0xc1, 0x92, 0xfe, 0x0d, 0xa5, 0x9e, 0x7d, 0x3a, 0x37, 0x96, 0xd3, 0x8b, 0x27, 0x47, - 0x03, 0xb6, 0x1a, 0x72, 0x5b, 0x8a, 0xf3, 0x01, 0xf0, 0x5f, 0xdb, 0xf8, 0x18, 0xcc, 0x3b, 0xaa, - 0xda, 0x65, 0xd4, 0x47, 0x7c, 0x0a, 0xe3, 0x5f, 0x5e, 0x52, 0x92, 0x3d, 0xd2, 0xb5, 0xe6, 0xe3, - 0xdd, 0xe8, 0x8d, 0xe1, 0xbc, 0x87, 0xd9, 0xc0, 0xe9, 0x7d, 0xda, 0x17, 0x7f, 0x0c, 0xb0, 0x1a, - 0x4f, 0xb8, 0x02, 0x2b, 0x8c, 0x29, 0x09, 0xa4, 0x6d, 0xe8, 0xcd, 0xe0, 0x91, 0xf1, 0x2f, 0x35, - 0xe4, 0xb6, 0x0c, 0x5c, 0xc1, 0xac, 0xfb, 0xa3, 0x1f, 0x45, 0x52, 0xa6, 0x99, 0x3d, 0xae, 0xa5, - 0x2f, 0x4f, 0xdc, 0x21, 0x80, 0x9f, 0xe0, 0xac, 0x2b, 0x5d, 0x6b, 0x17, 0x96, 0x5e, 0x8c, 0xc3, - 0x9a, 0x90, 0xb0, 0x7d, 0x48, 0xd8, 0x8f, 0x3d, 0xed, 0xf2, 0xc4, 0x1d, 0xf4, 0xe0, 0x73, 0x38, - 0xd3, 0x2b, 0xae, 0x36, 0x41, 0x3b, 0xf0, 0x81, 0xbe, 0xcb, 0xa0, 0xba, 0x9e, 0xc2, 0xa4, 0xeb, - 0x5c, 0xbc, 0x85, 0xb1, 0xf6, 0x5d, 0xe7, 0x2b, 0xf3, 0xd2, 0x2e, 0x5f, 0xf5, 0x19, 0xcf, 0x61, - 0x12, 0x92, 0xa7, 0xca, 0x82, 0x36, 0x41, 0xbb, 0x98, 0x43, 0x61, 0x7d, 0x0d, 0xfd, 0x40, 0xaf, - 0x67, 0x87, 0x64, 0x5c, 0xd5, 0x76, 0x7f, 0xbe, 0x8e, 0x62, 0x75, 0x53, 0xee, 0x98, 0x2f, 0x52, - 0x1e, 0x89, 0x5b, 0xba, 0xe3, 0xcd, 0xeb, 0xd0, 0x97, 0x91, 0x3c, 0xa2, 0x8c, 0x0a, 0x4f, 0x51, - 0xc0, 0x23, 0xc1, 0x7b, 0xef, 0x66, 0x67, 0x69, 0xc2, 0xab, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, - 0x91, 0xba, 0x56, 0x3c, 0x4d, 0x03, 0x00, 0x00, -} diff --git a/protos/generated/go/feast/specs/StorageSpec.pb.go b/protos/generated/go/feast/specs/StorageSpec.pb.go deleted file mode 100644 index fefe56a1be5..00000000000 --- a/protos/generated/go/feast/specs/StorageSpec.pb.go +++ /dev/null @@ -1,106 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. 
-// source: feast/specs/StorageSpec.proto - -package specs // import "github.com/gojek/feast/protos/generated/go/feast/specs" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type StorageSpec struct { - // unique identifier for this instance - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - // type should define what sort of store it is - // e.g. redis, bigquery, etc - Type string `protobuf:"bytes,2,opt,name=type,proto3" json:"type,omitempty"` - // options contain (but are not restricted to) options like - // connection information. - Options map[string]string `protobuf:"bytes,3,rep,name=options,proto3" json:"options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *StorageSpec) Reset() { *m = StorageSpec{} } -func (m *StorageSpec) String() string { return proto.CompactTextString(m) } -func (*StorageSpec) ProtoMessage() {} -func (*StorageSpec) Descriptor() ([]byte, []int) { - return fileDescriptor_StorageSpec_bfb8a5e5cf34de95, []int{0} -} -func (m *StorageSpec) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_StorageSpec.Unmarshal(m, b) -} -func (m *StorageSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_StorageSpec.Marshal(b, m, deterministic) -} -func (dst *StorageSpec) XXX_Merge(src proto.Message) { - xxx_messageInfo_StorageSpec.Merge(dst, src) -} -func (m *StorageSpec) XXX_Size() int { - return xxx_messageInfo_StorageSpec.Size(m) -} -func (m *StorageSpec) XXX_DiscardUnknown() { - xxx_messageInfo_StorageSpec.DiscardUnknown(m) -} - -var xxx_messageInfo_StorageSpec proto.InternalMessageInfo - -func (m *StorageSpec) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -func (m *StorageSpec) GetType() string { - if m != nil { - return m.Type - } - return "" -} - -func (m *StorageSpec) GetOptions() map[string]string { - if m != nil { - return m.Options - } - return nil -} - -func init() { - proto.RegisterType((*StorageSpec)(nil), "feast.specs.StorageSpec") - proto.RegisterMapType((map[string]string)(nil), "feast.specs.StorageSpec.OptionsEntry") -} - -func init() { - proto.RegisterFile("feast/specs/StorageSpec.proto", fileDescriptor_StorageSpec_bfb8a5e5cf34de95) -} - -var fileDescriptor_StorageSpec_bfb8a5e5cf34de95 = []byte{ - // 227 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4d, 0x4b, 0x4d, 0x2c, - 0x2e, 0xd1, 0x2f, 0x2e, 0x48, 0x4d, 0x2e, 0xd6, 0x0f, 0x2e, 0xc9, 0x2f, 0x4a, 0x4c, 0x4f, 0x0d, - 0x2e, 0x48, 0x4d, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x06, 0x4b, 0xeb, 0x81, 0xa5, - 0x95, 0xd6, 0x31, 0x72, 0x71, 0x23, 0x29, 0x11, 0xe2, 0xe3, 0x62, 0xca, 0x4c, 0x91, 0x60, 0x54, - 0x60, 0xd4, 0xe0, 0x0c, 0x62, 0xca, 0x4c, 0x11, 0x12, 0xe2, 0x62, 0x29, 0xa9, 0x2c, 0x48, 0x95, - 0x60, 0x02, 0x8b, 0x80, 0xd9, 0x42, 0xf6, 0x5c, 0xec, 0xf9, 0x05, 0x25, 0x99, 
0xf9, 0x79, 0xc5, - 0x12, 0xcc, 0x0a, 0xcc, 0x1a, 0xdc, 0x46, 0xaa, 0x7a, 0x48, 0x46, 0xea, 0x21, 0xdb, 0xe8, 0x0f, - 0x51, 0xe7, 0x9a, 0x57, 0x52, 0x54, 0x19, 0x04, 0xd3, 0x25, 0x65, 0xc5, 0xc5, 0x83, 0x2c, 0x21, - 0x24, 0xc0, 0xc5, 0x9c, 0x9d, 0x5a, 0x09, 0xb5, 0x15, 0xc4, 0x14, 0x12, 0xe1, 0x62, 0x2d, 0x4b, - 0xcc, 0x29, 0x85, 0xd9, 0x0b, 0xe1, 0x58, 0x31, 0x59, 0x30, 0x3a, 0x85, 0x73, 0x21, 0xbb, 0xdf, - 0x49, 0x00, 0xc9, 0xb6, 0x00, 0x90, 0xf7, 0xa2, 0xcc, 0xd2, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, - 0x92, 0xf3, 0x73, 0xf5, 0xd3, 0xf3, 0xb3, 0x52, 0xb3, 0xf5, 0x21, 0xc1, 0x01, 0xf6, 0x7c, 0xb1, - 0x7e, 0x7a, 0x6a, 0x5e, 0x6a, 0x51, 0x62, 0x49, 0x6a, 0x8a, 0x7e, 0x7a, 0xbe, 0x3e, 0x52, 0x40, - 0x25, 0xb1, 0x81, 0x15, 0x18, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, 0x55, 0xb2, 0x9e, 0xa5, 0x3e, - 0x01, 0x00, 0x00, -} diff --git a/protos/generated/go/feast/storage/BigTable.pb.go b/protos/generated/go/feast/storage/BigTable.pb.go deleted file mode 100644 index 50b0ad906ee..00000000000 --- a/protos/generated/go/feast/storage/BigTable.pb.go +++ /dev/null @@ -1,99 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: feast/storage/BigTable.proto - -package storage // import "github.com/gojek/feast/protos/generated/go/feast/storage" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type BigTableRowKey struct { - // This should be the first 7 characters of a sha1 of the entityKey proto encoded - Sha1Prefix string `protobuf:"bytes,1,opt,name=sha1Prefix,proto3" json:"sha1Prefix,omitempty"` - EntityKey string `protobuf:"bytes,2,opt,name=entityKey,proto3" json:"entityKey,omitempty"` - ReversedMillis string `protobuf:"bytes,3,opt,name=reversedMillis,proto3" json:"reversedMillis,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *BigTableRowKey) Reset() { *m = BigTableRowKey{} } -func (m *BigTableRowKey) String() string { return proto.CompactTextString(m) } -func (*BigTableRowKey) ProtoMessage() {} -func (*BigTableRowKey) Descriptor() ([]byte, []int) { - return fileDescriptor_BigTable_367a7ab40da489b0, []int{0} -} -func (m *BigTableRowKey) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_BigTableRowKey.Unmarshal(m, b) -} -func (m *BigTableRowKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_BigTableRowKey.Marshal(b, m, deterministic) -} -func (dst *BigTableRowKey) XXX_Merge(src proto.Message) { - xxx_messageInfo_BigTableRowKey.Merge(dst, src) -} -func (m *BigTableRowKey) XXX_Size() int { - return xxx_messageInfo_BigTableRowKey.Size(m) -} -func (m *BigTableRowKey) XXX_DiscardUnknown() { - xxx_messageInfo_BigTableRowKey.DiscardUnknown(m) -} - -var xxx_messageInfo_BigTableRowKey proto.InternalMessageInfo - -func (m *BigTableRowKey) GetSha1Prefix() string { - if m != nil { - return m.Sha1Prefix - } - return "" -} - -func (m *BigTableRowKey) GetEntityKey() string { - if m != nil { - return m.EntityKey - } - return "" -} - -func (m *BigTableRowKey) GetReversedMillis() 
string { - if m != nil { - return m.ReversedMillis - } - return "" -} - -func init() { - proto.RegisterType((*BigTableRowKey)(nil), "feast.storage.BigTableRowKey") -} - -func init() { - proto.RegisterFile("feast/storage/BigTable.proto", fileDescriptor_BigTable_367a7ab40da489b0) -} - -var fileDescriptor_BigTable_367a7ab40da489b0 = []byte{ - // 193 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x8f, 0xb1, 0x0b, 0x82, 0x40, - 0x18, 0x47, 0xb1, 0x20, 0xf0, 0x40, 0x87, 0x9b, 0x1c, 0x24, 0xa2, 0x21, 0x9a, 0x3c, 0xa2, 0xa5, - 0xd9, 0x55, 0x02, 0x91, 0x86, 0x68, 0x3b, 0xf3, 0xf3, 0xbc, 0x32, 0x2f, 0xee, 0xbe, 0x2c, 0xff, - 0xfb, 0xe8, 0x4a, 0xca, 0xd6, 0xdf, 0xef, 0x0d, 0xef, 0x91, 0xb0, 0x04, 0x6e, 0x90, 0x19, 0x54, - 0x9a, 0x0b, 0x60, 0xb1, 0x14, 0x3b, 0x9e, 0xd7, 0x10, 0x5d, 0xb5, 0x42, 0x45, 0x3d, 0xfb, 0x46, - 0x9f, 0x77, 0xde, 0x12, 0xbf, 0x07, 0x32, 0x75, 0x4f, 0xa0, 0xa3, 0x53, 0x42, 0x4c, 0xc5, 0x57, - 0xa9, 0x86, 0x52, 0x3e, 0x02, 0x67, 0xe6, 0x2c, 0xdd, 0xec, 0x67, 0xa1, 0x21, 0x71, 0xa1, 0x41, - 0x89, 0x5d, 0x02, 0x5d, 0x30, 0xb2, 0xf7, 0x77, 0xa0, 0x0b, 0xe2, 0x6b, 0x68, 0x41, 0x1b, 0x28, - 0xb6, 0xb2, 0xae, 0xa5, 0x09, 0xc6, 0x16, 0xf9, 0x5b, 0xe3, 0x3d, 0x19, 0x8a, 0xc4, 0x5e, 0xaf, - 0x91, 0xbe, 0x34, 0x0f, 0x1b, 0x21, 0xb1, 0xba, 0xe5, 0xd1, 0x51, 0x5d, 0x98, 0x50, 0x27, 0x38, - 0xb3, 0x77, 0x95, 0x8d, 0x30, 0x4c, 0x40, 0x03, 0x9a, 0x23, 0x14, 0x4c, 0x28, 0x36, 0xe8, 0xcd, - 0x27, 0x16, 0x59, 0x3f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xc5, 0xd9, 0xf3, 0xb2, 0x07, 0x01, 0x00, - 0x00, -} diff --git a/protos/generated/go/feast/storage/Redis.pb.go b/protos/generated/go/feast/storage/Redis.pb.go deleted file mode 100644 index 49f80a061e3..00000000000 --- a/protos/generated/go/feast/storage/Redis.pb.go +++ /dev/null @@ -1,208 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: feast/storage/Redis.proto - -package storage // import "github.com/gojek/feast/protos/generated/go/feast/storage" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import types "github.com/gojek/feast/protos/generated/go/feast/types" -import timestamp "github.com/golang/protobuf/ptypes/timestamp" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type RedisBucketKey struct { - // Entity key from the FeatureRow - EntityKey string `protobuf:"bytes,2,opt,name=entityKey,proto3" json:"entityKey,omitempty"` - // * - // This should be the first 7 characters of a sha1 of the featureId - // This is just to save storage space as it's kept in memory. - FeatureIdSha1Prefix string `protobuf:"bytes,3,opt,name=featureIdSha1Prefix,proto3" json:"featureIdSha1Prefix,omitempty"` - // * - // This groups a feature's values (for different eventTimestamps), - // into buckets so many can be retrieved together. - // - // See FeatureRowToRedisMutationDoFn. 
- // bucketId = roundedToGranularity(eventTimestamp).seconds / bucketSize.seconds - BucketId uint64 `protobuf:"fixed64,4,opt,name=bucketId,proto3" json:"bucketId,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RedisBucketKey) Reset() { *m = RedisBucketKey{} } -func (m *RedisBucketKey) String() string { return proto.CompactTextString(m) } -func (*RedisBucketKey) ProtoMessage() {} -func (*RedisBucketKey) Descriptor() ([]byte, []int) { - return fileDescriptor_Redis_cef62c817c1622ce, []int{0} -} -func (m *RedisBucketKey) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_RedisBucketKey.Unmarshal(m, b) -} -func (m *RedisBucketKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_RedisBucketKey.Marshal(b, m, deterministic) -} -func (dst *RedisBucketKey) XXX_Merge(src proto.Message) { - xxx_messageInfo_RedisBucketKey.Merge(dst, src) -} -func (m *RedisBucketKey) XXX_Size() int { - return xxx_messageInfo_RedisBucketKey.Size(m) -} -func (m *RedisBucketKey) XXX_DiscardUnknown() { - xxx_messageInfo_RedisBucketKey.DiscardUnknown(m) -} - -var xxx_messageInfo_RedisBucketKey proto.InternalMessageInfo - -func (m *RedisBucketKey) GetEntityKey() string { - if m != nil { - return m.EntityKey - } - return "" -} - -func (m *RedisBucketKey) GetFeatureIdSha1Prefix() string { - if m != nil { - return m.FeatureIdSha1Prefix - } - return "" -} - -func (m *RedisBucketKey) GetBucketId() uint64 { - if m != nil { - return m.BucketId - } - return 0 -} - -// * -// Because in redis features are stored as a key per feature not per -// feature row, we need the event timestamp in the value. -type RedisBucketValue struct { - Value *types.Value `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` - EventTimestamp *timestamp.Timestamp `protobuf:"bytes,2,opt,name=eventTimestamp,proto3" json:"eventTimestamp,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RedisBucketValue) Reset() { *m = RedisBucketValue{} } -func (m *RedisBucketValue) String() string { return proto.CompactTextString(m) } -func (*RedisBucketValue) ProtoMessage() {} -func (*RedisBucketValue) Descriptor() ([]byte, []int) { - return fileDescriptor_Redis_cef62c817c1622ce, []int{1} -} -func (m *RedisBucketValue) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_RedisBucketValue.Unmarshal(m, b) -} -func (m *RedisBucketValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_RedisBucketValue.Marshal(b, m, deterministic) -} -func (dst *RedisBucketValue) XXX_Merge(src proto.Message) { - xxx_messageInfo_RedisBucketValue.Merge(dst, src) -} -func (m *RedisBucketValue) XXX_Size() int { - return xxx_messageInfo_RedisBucketValue.Size(m) -} -func (m *RedisBucketValue) XXX_DiscardUnknown() { - xxx_messageInfo_RedisBucketValue.DiscardUnknown(m) -} - -var xxx_messageInfo_RedisBucketValue proto.InternalMessageInfo - -func (m *RedisBucketValue) GetValue() *types.Value { - if m != nil { - return m.Value - } - return nil -} - -func (m *RedisBucketValue) GetEventTimestamp() *timestamp.Timestamp { - if m != nil { - return m.EventTimestamp - } - return nil -} - -// * -// This allows us to group multiple bucket values together in a -// single list to make it easier to keep sets together -type RedisBucketValueList struct { - Values []*RedisBucketValue `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` 
- XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RedisBucketValueList) Reset() { *m = RedisBucketValueList{} } -func (m *RedisBucketValueList) String() string { return proto.CompactTextString(m) } -func (*RedisBucketValueList) ProtoMessage() {} -func (*RedisBucketValueList) Descriptor() ([]byte, []int) { - return fileDescriptor_Redis_cef62c817c1622ce, []int{2} -} -func (m *RedisBucketValueList) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_RedisBucketValueList.Unmarshal(m, b) -} -func (m *RedisBucketValueList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_RedisBucketValueList.Marshal(b, m, deterministic) -} -func (dst *RedisBucketValueList) XXX_Merge(src proto.Message) { - xxx_messageInfo_RedisBucketValueList.Merge(dst, src) -} -func (m *RedisBucketValueList) XXX_Size() int { - return xxx_messageInfo_RedisBucketValueList.Size(m) -} -func (m *RedisBucketValueList) XXX_DiscardUnknown() { - xxx_messageInfo_RedisBucketValueList.DiscardUnknown(m) -} - -var xxx_messageInfo_RedisBucketValueList proto.InternalMessageInfo - -func (m *RedisBucketValueList) GetValues() []*RedisBucketValue { - if m != nil { - return m.Values - } - return nil -} - -func init() { - proto.RegisterType((*RedisBucketKey)(nil), "feast.storage.RedisBucketKey") - proto.RegisterType((*RedisBucketValue)(nil), "feast.storage.RedisBucketValue") - proto.RegisterType((*RedisBucketValueList)(nil), "feast.storage.RedisBucketValueList") -} - -func init() { proto.RegisterFile("feast/storage/Redis.proto", fileDescriptor_Redis_cef62c817c1622ce) } - -var fileDescriptor_Redis_cef62c817c1622ce = []byte{ - // 325 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x91, 0xcd, 0x4f, 0xf2, 0x40, - 0x10, 0xc6, 0xd3, 0x97, 0x57, 0x22, 0x4b, 0x24, 0x66, 0x35, 0xb1, 0x36, 0x26, 0x34, 0x9c, 0x7a, - 0xda, 0x55, 0x3c, 0xe8, 0xb9, 0x37, 0x82, 0x89, 0xa4, 0x7e, 0x1c, 0xbc, 0x6d, 0xe9, 0x74, 0xa9, - 0x7c, 0x2c, 0xe9, 0x4e, 0x89, 0x4d, 0x3c, 0xf8, 0xa7, 0x1b, 0x66, 0x01, 0x85, 0x78, 0x6b, 0xf7, - 0xf9, 0xcd, 0xcc, 0x33, 0xf3, 0xb0, 0xcb, 0x1c, 0x94, 0x45, 0x69, 0xd1, 0x94, 0x4a, 0x83, 0x4c, - 0x20, 0x2b, 0xac, 0x58, 0x96, 0x06, 0x0d, 0x3f, 0x21, 0x49, 0x6c, 0xa4, 0xa0, 0xab, 0x8d, 0xd1, - 0x33, 0x90, 0x24, 0xa6, 0x55, 0x2e, 0xb1, 0x98, 0x83, 0x45, 0x35, 0x5f, 0x3a, 0x3e, 0xb8, 0x70, - 0xad, 0xb0, 0x5e, 0x82, 0x95, 0xaf, 0x6a, 0x56, 0x81, 0x13, 0x7a, 0x9f, 0xac, 0x43, 0x7d, 0xe3, - 0x6a, 0x3c, 0x05, 0x1c, 0x42, 0xcd, 0xaf, 0x58, 0x0b, 0x16, 0x58, 0x60, 0x3d, 0x84, 0xda, 0xff, - 0x17, 0x7a, 0x51, 0x2b, 0xf9, 0x79, 0xe0, 0xd7, 0xec, 0x2c, 0x07, 0x85, 0x55, 0x09, 0x83, 0xec, - 0x69, 0xa2, 0x6e, 0x46, 0x25, 0xe4, 0xc5, 0x87, 0xdf, 0x20, 0xee, 0x2f, 0x89, 0x07, 0xec, 0x38, - 0xa5, 0xe6, 0x83, 0xcc, 0xff, 0x1f, 0x7a, 0x51, 0x33, 0xd9, 0xfd, 0xf7, 0xbe, 0x3c, 0x76, 0xfa, - 0x6b, 0x3c, 0x19, 0xe3, 0x11, 0x3b, 0x5a, 0xad, 0x3f, 0x7c, 0x2f, 0xf4, 0xa2, 0x76, 0x9f, 0x0b, - 0xb7, 0x2b, 0x79, 0x17, 0x84, 0x24, 0x0e, 0xe0, 0x31, 0xeb, 0xc0, 0x0a, 0x16, 0xf8, 0xbc, 0xdd, - 0x96, 0xfc, 0xb6, 0xfb, 0x81, 0x70, 0xf7, 0x10, 0xdb, 0x7b, 0x88, 0x1d, 0x91, 0x1c, 0x54, 0xf4, - 0x1e, 0xd9, 0xf9, 0xa1, 0x83, 0x87, 0xc2, 0x22, 0xbf, 0x63, 0x4d, 0x1a, 0x62, 0x7d, 0x2f, 0x6c, - 0x44, 0xed, 0x7e, 0x57, 0xec, 0x9d, 0x5c, 0x1c, 0x16, 0x25, 0x1b, 0x3c, 0x7e, 0x61, 0xfb, 0xe1, - 0xc4, 0x8c, 0xd0, 0xd1, 0xda, 0xca, 0xdb, 0xbd, 0x2e, 0x70, 0x52, 0xa5, 0x62, 0x6c, 0xe6, 0x52, - 0x9b, 0x77, 0x98, 0x4a, 0x17, 
0x0c, 0x19, 0xb5, 0x52, 0xc3, 0x02, 0x4a, 0x85, 0x90, 0x49, 0x6d, - 0xe4, 0x5e, 0xfa, 0x69, 0x93, 0x90, 0xdb, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x28, 0x5b, 0x47, - 0xf9, 0x15, 0x02, 0x00, 0x00, -} diff --git a/protos/generated/go/feast/types/Feature.pb.go b/protos/generated/go/feast/types/Feature.pb.go deleted file mode 100644 index 51ed41fb54f..00000000000 --- a/protos/generated/go/feast/types/Feature.pb.go +++ /dev/null @@ -1,86 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: feast/types/Feature.proto - -package types // import "github.com/gojek/feast/protos/generated/go/feast/types" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type Feature struct { - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - Value *Value `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *Feature) Reset() { *m = Feature{} } -func (m *Feature) String() string { return proto.CompactTextString(m) } -func (*Feature) ProtoMessage() {} -func (*Feature) Descriptor() ([]byte, []int) { - return fileDescriptor_Feature_c2a5d99d9bf3ca9c, []int{0} -} -func (m *Feature) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Feature.Unmarshal(m, b) -} -func (m *Feature) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Feature.Marshal(b, m, deterministic) -} -func (dst *Feature) XXX_Merge(src proto.Message) { - xxx_messageInfo_Feature.Merge(dst, src) -} -func (m *Feature) XXX_Size() int { - return xxx_messageInfo_Feature.Size(m) -} -func (m *Feature) XXX_DiscardUnknown() { - xxx_messageInfo_Feature.DiscardUnknown(m) -} - -var xxx_messageInfo_Feature proto.InternalMessageInfo - -func (m *Feature) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -func (m *Feature) GetValue() *Value { - if m != nil { - return m.Value - } - return nil -} - -func init() { - proto.RegisterType((*Feature)(nil), "feast.types.Feature") -} - -func init() { proto.RegisterFile("feast/types/Feature.proto", fileDescriptor_Feature_c2a5d99d9bf3ca9c) } - -var fileDescriptor_Feature_c2a5d99d9bf3ca9c = []byte{ - // 173 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4c, 0x4b, 0x4d, 0x2c, - 0x2e, 0xd1, 0x2f, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x77, 0x4b, 0x4d, 0x2c, 0x29, 0x2d, 0x4a, 0xd5, - 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x06, 0x4b, 0xe9, 0x81, 0xa5, 0xa4, 0xc4, 0x91, 0xd5, - 0x85, 0x25, 0xe6, 0x94, 0x42, 0x55, 0x29, 0x39, 0x73, 0xb1, 0x43, 0xb5, 0x09, 0xf1, 0x71, 0x31, - 0x65, 0xa6, 0x48, 0x30, 0x2a, 0x30, 0x6a, 0x70, 0x06, 0x31, 0x65, 0xa6, 0x08, 0x69, 0x70, 0xb1, - 0x96, 0x81, 0x54, 0x4a, 0x30, 0x29, 0x30, 0x6a, 0x70, 0x1b, 0x09, 0xe9, 0x21, 0x19, 0xa8, 0x07, - 0x36, 0x23, 0x08, 0xa2, 0xc0, 0x29, 0x98, 0x0b, 0xd9, 0x32, 0x27, 0x1e, 0xa8, 0x89, 0x01, 0x20, - 0x1b, 0xa2, 0xcc, 0xd2, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, 0x92, 0xf3, 0x73, 0xf5, 0xd3, 
0xf3, - 0xb3, 0x52, 0xb3, 0xf5, 0x21, 0x6e, 0x01, 0xdb, 0x5f, 0xac, 0x9f, 0x9e, 0x9a, 0x97, 0x5a, 0x94, - 0x58, 0x92, 0x9a, 0xa2, 0x9f, 0x9e, 0xaf, 0x8f, 0xe4, 0xca, 0x24, 0x36, 0xb0, 0x02, 0x63, 0x40, - 0x00, 0x00, 0x00, 0xff, 0xff, 0x72, 0x5f, 0x2d, 0xd9, 0xe3, 0x00, 0x00, 0x00, -} diff --git a/protos/generated/go/feast/types/FeatureRow.pb.go b/protos/generated/go/feast/types/FeatureRow.pb.go deleted file mode 100644 index b2f9f5de401..00000000000 --- a/protos/generated/go/feast/types/FeatureRow.pb.go +++ /dev/null @@ -1,110 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: feast/types/FeatureRow.proto - -package types // import "github.com/gojek/feast/protos/generated/go/feast/types" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import timestamp "github.com/golang/protobuf/ptypes/timestamp" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type FeatureRow struct { - EntityKey string `protobuf:"bytes,1,opt,name=entityKey,proto3" json:"entityKey,omitempty"` - Features []*Feature `protobuf:"bytes,2,rep,name=features,proto3" json:"features,omitempty"` - EventTimestamp *timestamp.Timestamp `protobuf:"bytes,3,opt,name=eventTimestamp,proto3" json:"eventTimestamp,omitempty"` - EntityName string `protobuf:"bytes,4,opt,name=entityName,proto3" json:"entityName,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *FeatureRow) Reset() { *m = FeatureRow{} } -func (m *FeatureRow) String() string { return proto.CompactTextString(m) } -func (*FeatureRow) ProtoMessage() {} -func (*FeatureRow) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureRow_b534c07ebff1be93, []int{0} -} -func (m *FeatureRow) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_FeatureRow.Unmarshal(m, b) -} -func (m *FeatureRow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_FeatureRow.Marshal(b, m, deterministic) -} -func (dst *FeatureRow) XXX_Merge(src proto.Message) { - xxx_messageInfo_FeatureRow.Merge(dst, src) -} -func (m *FeatureRow) XXX_Size() int { - return xxx_messageInfo_FeatureRow.Size(m) -} -func (m *FeatureRow) XXX_DiscardUnknown() { - xxx_messageInfo_FeatureRow.DiscardUnknown(m) -} - -var xxx_messageInfo_FeatureRow proto.InternalMessageInfo - -func (m *FeatureRow) GetEntityKey() string { - if m != nil { - return m.EntityKey - } - return "" -} - -func (m *FeatureRow) GetFeatures() []*Feature { - if m != nil { - return m.Features - } - return nil -} - -func (m *FeatureRow) GetEventTimestamp() *timestamp.Timestamp { - if m != nil { - return m.EventTimestamp - } - return nil -} - -func (m *FeatureRow) GetEntityName() string { - if m != nil { - return m.EntityName - } - return "" -} - -func init() { - proto.RegisterType((*FeatureRow)(nil), "feast.types.FeatureRow") -} - -func init() { - proto.RegisterFile("feast/types/FeatureRow.proto", fileDescriptor_FeatureRow_b534c07ebff1be93) -} - -var fileDescriptor_FeatureRow_b534c07ebff1be93 = []byte{ - // 248 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x90, 0xc1, 0x4a, 0x85, 0x40, - 0x14, 0x86, 0xb1, 0x1b, 0xd1, 0x3d, 0x42, 0xc1, 0xd0, 0xc2, 0xe4, 0x52, 0xd2, 0xca, 0xd5, 0x9c, - 0xb8, 0x41, 0x0f, 0xe0, 0xa2, 0x4d, 0x10, 0x21, 0xd1, 0xa2, 0xdd, 0x58, 0xc7, 0xc9, 0x4a, 0x47, - 0x9c, 0x63, 0xe1, 0xdb, 0xf5, 0x68, 0xd1, 0x0c, 0x5e, 0x87, 0x68, 0x7b, 0xfe, 0xcf, 0xdf, 0x6f, - 0x7e, 0xd8, 0xd4, 0xa4, 0x2c, 0x23, 0x4f, 0x3d, 0x59, 0xbc, 0x21, 0xc5, 0xe3, 0x40, 0xa5, 0xf9, - 0x92, 0xfd, 0x60, 0xd8, 0x88, 0xd8, 0xa5, 0xd2, 0xa5, 0xe9, 0xb9, 0x36, 0x46, 0x7f, 0x10, 0xba, - 0xa8, 0x1a, 0x6b, 0xe4, 0xa6, 0x25, 0xcb, 0xaa, 0xed, 0x3d, 0x9d, 0x9e, 0xfe, 0xd3, 0xe5, 0xa3, - 0x8b, 0xef, 0x08, 0x60, 0x69, 0x17, 0x1b, 0x58, 0x53, 0xc7, 0x0d, 0x4f, 0xb7, 0x34, 0x25, 0x51, - 0x16, 0xe5, 0xeb, 0x72, 0x39, 0x88, 0x4b, 0x38, 0xac, 0x3d, 0x6b, 0x93, 0xbd, 0x6c, 0x95, 0xc7, - 0xdb, 0x13, 0x19, 0x88, 0xc8, 0xb9, 0x68, 0x47, 0x89, 0x02, 0x8e, 0xe8, 0x93, 0x3a, 0x7e, 0x98, - 0x8d, 0x92, 0x55, 0x16, 0xe5, 0xf1, 0x36, 0x95, 0xde, 0x59, 0xce, 0xce, 0x72, 0x47, 0x94, 0x7f, - 0xbe, 0x10, 0x67, 0x00, 0x5e, 0xe1, 0x4e, 0xb5, 0x94, 0xec, 0x3b, 0xa9, 0xe0, 0x52, 0x3c, 0x42, - 0xb8, 0x46, 0x71, 0xbc, 0x3c, 0xe7, 0xfe, 0xb7, 0xfc, 0xe9, 0x5a, 0x37, 0xfc, 0x3a, 0x56, 0xf2, - 0xd9, 0xb4, 0xa8, 0xcd, 0x1b, 0xbd, 0xa3, 0x9f, 0xc3, 0xfd, 0xda, 0xa2, 0xa6, 0x8e, 0x06, 0xc5, - 0xf4, 0x82, 0xda, 0x60, 0x30, 0x54, 0x75, 0xe0, 0x80, 0xab, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, - 0x1d, 0xcd, 0xff, 0x6c, 0x8a, 0x01, 0x00, 0x00, -} diff --git a/protos/generated/go/feast/types/Value.pb.go b/protos/generated/go/feast/types/Value.pb.go deleted file mode 100644 index b9c26f8c4b8..00000000000 --- a/protos/generated/go/feast/types/Value.pb.go +++ /dev/null @@ -1,1116 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: feast/types/Value.proto - -package types // import "github.com/gojek/feast/protos/generated/go/feast/types" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import timestamp "github.com/golang/protobuf/ptypes/timestamp" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type ValueType_Enum int32 - -const ( - ValueType_UNKNOWN ValueType_Enum = 0 - ValueType_BYTES ValueType_Enum = 1 - ValueType_STRING ValueType_Enum = 2 - ValueType_INT32 ValueType_Enum = 3 - ValueType_INT64 ValueType_Enum = 4 - ValueType_DOUBLE ValueType_Enum = 5 - ValueType_FLOAT ValueType_Enum = 6 - ValueType_BOOL ValueType_Enum = 7 - ValueType_TIMESTAMP ValueType_Enum = 8 -) - -var ValueType_Enum_name = map[int32]string{ - 0: "UNKNOWN", - 1: "BYTES", - 2: "STRING", - 3: "INT32", - 4: "INT64", - 5: "DOUBLE", - 6: "FLOAT", - 7: "BOOL", - 8: "TIMESTAMP", -} -var ValueType_Enum_value = map[string]int32{ - "UNKNOWN": 0, - "BYTES": 1, - "STRING": 2, - "INT32": 3, - "INT64": 4, - "DOUBLE": 5, - "FLOAT": 6, - "BOOL": 7, - "TIMESTAMP": 8, -} - -func (x ValueType_Enum) String() string { - return proto.EnumName(ValueType_Enum_name, int32(x)) -} -func (ValueType_Enum) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{0, 0} -} - -type ValueType struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *ValueType) Reset() { *m = ValueType{} } -func (m *ValueType) String() string { return proto.CompactTextString(m) } -func (*ValueType) ProtoMessage() {} -func (*ValueType) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{0} -} -func (m *ValueType) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_ValueType.Unmarshal(m, b) -} -func (m *ValueType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_ValueType.Marshal(b, m, deterministic) -} -func (dst *ValueType) XXX_Merge(src proto.Message) { - xxx_messageInfo_ValueType.Merge(dst, src) -} -func (m *ValueType) XXX_Size() int { - return xxx_messageInfo_ValueType.Size(m) -} -func (m *ValueType) XXX_DiscardUnknown() { - xxx_messageInfo_ValueType.DiscardUnknown(m) -} - -var xxx_messageInfo_ValueType proto.InternalMessageInfo - -type Value struct { - // Types that are valid to be assigned to Val: - // *Value_BytesVal - // *Value_StringVal - // *Value_Int32Val - // *Value_Int64Val - // *Value_DoubleVal - // *Value_FloatVal - // *Value_BoolVal - // *Value_TimestampVal - Val isValue_Val `protobuf_oneof:"val"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *Value) Reset() { *m = Value{} } -func (m *Value) String() string { return proto.CompactTextString(m) } -func (*Value) ProtoMessage() {} -func (*Value) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{1} -} -func (m *Value) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Value.Unmarshal(m, b) -} -func (m *Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Value.Marshal(b, m, deterministic) -} -func (dst *Value) XXX_Merge(src proto.Message) { - xxx_messageInfo_Value.Merge(dst, src) -} -func (m *Value) XXX_Size() int { - return xxx_messageInfo_Value.Size(m) -} -func (m *Value) XXX_DiscardUnknown() { - xxx_messageInfo_Value.DiscardUnknown(m) -} - -var xxx_messageInfo_Value proto.InternalMessageInfo - -type isValue_Val interface { - isValue_Val() -} - -type Value_BytesVal struct { - BytesVal []byte `protobuf:"bytes,1,opt,name=bytesVal,proto3,oneof"` -} - -type Value_StringVal struct { - StringVal string `protobuf:"bytes,2,opt,name=stringVal,proto3,oneof"` -} - -type Value_Int32Val struct 
{ - Int32Val int32 `protobuf:"varint,3,opt,name=int32Val,proto3,oneof"` -} - -type Value_Int64Val struct { - Int64Val int64 `protobuf:"varint,4,opt,name=int64Val,proto3,oneof"` -} - -type Value_DoubleVal struct { - DoubleVal float64 `protobuf:"fixed64,5,opt,name=doubleVal,proto3,oneof"` -} - -type Value_FloatVal struct { - FloatVal float32 `protobuf:"fixed32,6,opt,name=floatVal,proto3,oneof"` -} - -type Value_BoolVal struct { - BoolVal bool `protobuf:"varint,7,opt,name=boolVal,proto3,oneof"` -} - -type Value_TimestampVal struct { - TimestampVal *timestamp.Timestamp `protobuf:"bytes,8,opt,name=timestampVal,proto3,oneof"` -} - -func (*Value_BytesVal) isValue_Val() {} - -func (*Value_StringVal) isValue_Val() {} - -func (*Value_Int32Val) isValue_Val() {} - -func (*Value_Int64Val) isValue_Val() {} - -func (*Value_DoubleVal) isValue_Val() {} - -func (*Value_FloatVal) isValue_Val() {} - -func (*Value_BoolVal) isValue_Val() {} - -func (*Value_TimestampVal) isValue_Val() {} - -func (m *Value) GetVal() isValue_Val { - if m != nil { - return m.Val - } - return nil -} - -func (m *Value) GetBytesVal() []byte { - if x, ok := m.GetVal().(*Value_BytesVal); ok { - return x.BytesVal - } - return nil -} - -func (m *Value) GetStringVal() string { - if x, ok := m.GetVal().(*Value_StringVal); ok { - return x.StringVal - } - return "" -} - -func (m *Value) GetInt32Val() int32 { - if x, ok := m.GetVal().(*Value_Int32Val); ok { - return x.Int32Val - } - return 0 -} - -func (m *Value) GetInt64Val() int64 { - if x, ok := m.GetVal().(*Value_Int64Val); ok { - return x.Int64Val - } - return 0 -} - -func (m *Value) GetDoubleVal() float64 { - if x, ok := m.GetVal().(*Value_DoubleVal); ok { - return x.DoubleVal - } - return 0 -} - -func (m *Value) GetFloatVal() float32 { - if x, ok := m.GetVal().(*Value_FloatVal); ok { - return x.FloatVal - } - return 0 -} - -func (m *Value) GetBoolVal() bool { - if x, ok := m.GetVal().(*Value_BoolVal); ok { - return x.BoolVal - } - return false -} - -func (m *Value) GetTimestampVal() *timestamp.Timestamp { - if x, ok := m.GetVal().(*Value_TimestampVal); ok { - return x.TimestampVal - } - return nil -} - -// XXX_OneofFuncs is for the internal use of the proto package. 
-func (*Value) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { - return _Value_OneofMarshaler, _Value_OneofUnmarshaler, _Value_OneofSizer, []interface{}{ - (*Value_BytesVal)(nil), - (*Value_StringVal)(nil), - (*Value_Int32Val)(nil), - (*Value_Int64Val)(nil), - (*Value_DoubleVal)(nil), - (*Value_FloatVal)(nil), - (*Value_BoolVal)(nil), - (*Value_TimestampVal)(nil), - } -} - -func _Value_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { - m := msg.(*Value) - // val - switch x := m.Val.(type) { - case *Value_BytesVal: - b.EncodeVarint(1<<3 | proto.WireBytes) - b.EncodeRawBytes(x.BytesVal) - case *Value_StringVal: - b.EncodeVarint(2<<3 | proto.WireBytes) - b.EncodeStringBytes(x.StringVal) - case *Value_Int32Val: - b.EncodeVarint(3<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.Int32Val)) - case *Value_Int64Val: - b.EncodeVarint(4<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.Int64Val)) - case *Value_DoubleVal: - b.EncodeVarint(5<<3 | proto.WireFixed64) - b.EncodeFixed64(math.Float64bits(x.DoubleVal)) - case *Value_FloatVal: - b.EncodeVarint(6<<3 | proto.WireFixed32) - b.EncodeFixed32(uint64(math.Float32bits(x.FloatVal))) - case *Value_BoolVal: - t := uint64(0) - if x.BoolVal { - t = 1 - } - b.EncodeVarint(7<<3 | proto.WireVarint) - b.EncodeVarint(t) - case *Value_TimestampVal: - b.EncodeVarint(8<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.TimestampVal); err != nil { - return err - } - case nil: - default: - return fmt.Errorf("Value.Val has unexpected type %T", x) - } - return nil -} - -func _Value_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { - m := msg.(*Value) - switch tag { - case 1: // val.bytesVal - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeRawBytes(true) - m.Val = &Value_BytesVal{x} - return true, err - case 2: // val.stringVal - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeStringBytes() - m.Val = &Value_StringVal{x} - return true, err - case 3: // val.int32Val - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Val = &Value_Int32Val{int32(x)} - return true, err - case 4: // val.int64Val - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Val = &Value_Int64Val{int64(x)} - return true, err - case 5: // val.doubleVal - if wire != proto.WireFixed64 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed64() - m.Val = &Value_DoubleVal{math.Float64frombits(x)} - return true, err - case 6: // val.floatVal - if wire != proto.WireFixed32 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed32() - m.Val = &Value_FloatVal{math.Float32frombits(uint32(x))} - return true, err - case 7: // val.boolVal - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Val = &Value_BoolVal{x != 0} - return true, err - case 8: // val.timestampVal - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(timestamp.Timestamp) - err := b.DecodeMessage(msg) - m.Val = &Value_TimestampVal{msg} - return true, err - default: - return false, nil - } -} - -func _Value_OneofSizer(msg proto.Message) (n int) { - m := msg.(*Value) - // val - switch x := m.Val.(type) { - case 
*Value_BytesVal: - n += 1 // tag and wire - n += proto.SizeVarint(uint64(len(x.BytesVal))) - n += len(x.BytesVal) - case *Value_StringVal: - n += 1 // tag and wire - n += proto.SizeVarint(uint64(len(x.StringVal))) - n += len(x.StringVal) - case *Value_Int32Val: - n += 1 // tag and wire - n += proto.SizeVarint(uint64(x.Int32Val)) - case *Value_Int64Val: - n += 1 // tag and wire - n += proto.SizeVarint(uint64(x.Int64Val)) - case *Value_DoubleVal: - n += 1 // tag and wire - n += 8 - case *Value_FloatVal: - n += 1 // tag and wire - n += 4 - case *Value_BoolVal: - n += 1 // tag and wire - n += 1 - case *Value_TimestampVal: - s := proto.Size(x.TimestampVal) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case nil: - default: - panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) - } - return n -} - -type ValueList struct { - // Types that are valid to be assigned to ValueList: - // *ValueList_BytesList - // *ValueList_StringList - // *ValueList_Int32List - // *ValueList_Int64List - // *ValueList_DoubleList - // *ValueList_FloatList - // *ValueList_BoolList - // *ValueList_TimestampList - ValueList isValueList_ValueList `protobuf_oneof:"valueList"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *ValueList) Reset() { *m = ValueList{} } -func (m *ValueList) String() string { return proto.CompactTextString(m) } -func (*ValueList) ProtoMessage() {} -func (*ValueList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{2} -} -func (m *ValueList) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_ValueList.Unmarshal(m, b) -} -func (m *ValueList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_ValueList.Marshal(b, m, deterministic) -} -func (dst *ValueList) XXX_Merge(src proto.Message) { - xxx_messageInfo_ValueList.Merge(dst, src) -} -func (m *ValueList) XXX_Size() int { - return xxx_messageInfo_ValueList.Size(m) -} -func (m *ValueList) XXX_DiscardUnknown() { - xxx_messageInfo_ValueList.DiscardUnknown(m) -} - -var xxx_messageInfo_ValueList proto.InternalMessageInfo - -type isValueList_ValueList interface { - isValueList_ValueList() -} - -type ValueList_BytesList struct { - BytesList *BytesList `protobuf:"bytes,1,opt,name=bytesList,proto3,oneof"` -} - -type ValueList_StringList struct { - StringList *StringList `protobuf:"bytes,2,opt,name=stringList,proto3,oneof"` -} - -type ValueList_Int32List struct { - Int32List *Int32List `protobuf:"bytes,3,opt,name=int32List,proto3,oneof"` -} - -type ValueList_Int64List struct { - Int64List *Int64List `protobuf:"bytes,4,opt,name=int64List,proto3,oneof"` -} - -type ValueList_DoubleList struct { - DoubleList *DoubleList `protobuf:"bytes,5,opt,name=doubleList,proto3,oneof"` -} - -type ValueList_FloatList struct { - FloatList *FloatList `protobuf:"bytes,6,opt,name=floatList,proto3,oneof"` -} - -type ValueList_BoolList struct { - BoolList *BoolList `protobuf:"bytes,7,opt,name=boolList,proto3,oneof"` -} - -type ValueList_TimestampList struct { - TimestampList *TimestampList `protobuf:"bytes,8,opt,name=timestampList,proto3,oneof"` -} - -func (*ValueList_BytesList) isValueList_ValueList() {} - -func (*ValueList_StringList) isValueList_ValueList() {} - -func (*ValueList_Int32List) isValueList_ValueList() {} - -func (*ValueList_Int64List) isValueList_ValueList() {} - -func (*ValueList_DoubleList) isValueList_ValueList() {} - -func (*ValueList_FloatList) isValueList_ValueList() {} - -func 
(*ValueList_BoolList) isValueList_ValueList() {} - -func (*ValueList_TimestampList) isValueList_ValueList() {} - -func (m *ValueList) GetValueList() isValueList_ValueList { - if m != nil { - return m.ValueList - } - return nil -} - -func (m *ValueList) GetBytesList() *BytesList { - if x, ok := m.GetValueList().(*ValueList_BytesList); ok { - return x.BytesList - } - return nil -} - -func (m *ValueList) GetStringList() *StringList { - if x, ok := m.GetValueList().(*ValueList_StringList); ok { - return x.StringList - } - return nil -} - -func (m *ValueList) GetInt32List() *Int32List { - if x, ok := m.GetValueList().(*ValueList_Int32List); ok { - return x.Int32List - } - return nil -} - -func (m *ValueList) GetInt64List() *Int64List { - if x, ok := m.GetValueList().(*ValueList_Int64List); ok { - return x.Int64List - } - return nil -} - -func (m *ValueList) GetDoubleList() *DoubleList { - if x, ok := m.GetValueList().(*ValueList_DoubleList); ok { - return x.DoubleList - } - return nil -} - -func (m *ValueList) GetFloatList() *FloatList { - if x, ok := m.GetValueList().(*ValueList_FloatList); ok { - return x.FloatList - } - return nil -} - -func (m *ValueList) GetBoolList() *BoolList { - if x, ok := m.GetValueList().(*ValueList_BoolList); ok { - return x.BoolList - } - return nil -} - -func (m *ValueList) GetTimestampList() *TimestampList { - if x, ok := m.GetValueList().(*ValueList_TimestampList); ok { - return x.TimestampList - } - return nil -} - -// XXX_OneofFuncs is for the internal use of the proto package. -func (*ValueList) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { - return _ValueList_OneofMarshaler, _ValueList_OneofUnmarshaler, _ValueList_OneofSizer, []interface{}{ - (*ValueList_BytesList)(nil), - (*ValueList_StringList)(nil), - (*ValueList_Int32List)(nil), - (*ValueList_Int64List)(nil), - (*ValueList_DoubleList)(nil), - (*ValueList_FloatList)(nil), - (*ValueList_BoolList)(nil), - (*ValueList_TimestampList)(nil), - } -} - -func _ValueList_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { - m := msg.(*ValueList) - // valueList - switch x := m.ValueList.(type) { - case *ValueList_BytesList: - b.EncodeVarint(1<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.BytesList); err != nil { - return err - } - case *ValueList_StringList: - b.EncodeVarint(2<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.StringList); err != nil { - return err - } - case *ValueList_Int32List: - b.EncodeVarint(3<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.Int32List); err != nil { - return err - } - case *ValueList_Int64List: - b.EncodeVarint(4<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.Int64List); err != nil { - return err - } - case *ValueList_DoubleList: - b.EncodeVarint(5<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.DoubleList); err != nil { - return err - } - case *ValueList_FloatList: - b.EncodeVarint(6<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.FloatList); err != nil { - return err - } - case *ValueList_BoolList: - b.EncodeVarint(7<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.BoolList); err != nil { - return err - } - case *ValueList_TimestampList: - b.EncodeVarint(8<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.TimestampList); err != nil { - return err - } - case nil: - default: - return fmt.Errorf("ValueList.ValueList has unexpected type %T", x) - } - return nil -} - -func _ValueList_OneofUnmarshaler(msg 
proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { - m := msg.(*ValueList) - switch tag { - case 1: // valueList.bytesList - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(BytesList) - err := b.DecodeMessage(msg) - m.ValueList = &ValueList_BytesList{msg} - return true, err - case 2: // valueList.stringList - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(StringList) - err := b.DecodeMessage(msg) - m.ValueList = &ValueList_StringList{msg} - return true, err - case 3: // valueList.int32List - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(Int32List) - err := b.DecodeMessage(msg) - m.ValueList = &ValueList_Int32List{msg} - return true, err - case 4: // valueList.int64List - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(Int64List) - err := b.DecodeMessage(msg) - m.ValueList = &ValueList_Int64List{msg} - return true, err - case 5: // valueList.doubleList - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(DoubleList) - err := b.DecodeMessage(msg) - m.ValueList = &ValueList_DoubleList{msg} - return true, err - case 6: // valueList.floatList - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(FloatList) - err := b.DecodeMessage(msg) - m.ValueList = &ValueList_FloatList{msg} - return true, err - case 7: // valueList.boolList - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(BoolList) - err := b.DecodeMessage(msg) - m.ValueList = &ValueList_BoolList{msg} - return true, err - case 8: // valueList.timestampList - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(TimestampList) - err := b.DecodeMessage(msg) - m.ValueList = &ValueList_TimestampList{msg} - return true, err - default: - return false, nil - } -} - -func _ValueList_OneofSizer(msg proto.Message) (n int) { - m := msg.(*ValueList) - // valueList - switch x := m.ValueList.(type) { - case *ValueList_BytesList: - s := proto.Size(x.BytesList) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case *ValueList_StringList: - s := proto.Size(x.StringList) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case *ValueList_Int32List: - s := proto.Size(x.Int32List) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case *ValueList_Int64List: - s := proto.Size(x.Int64List) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case *ValueList_DoubleList: - s := proto.Size(x.DoubleList) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case *ValueList_FloatList: - s := proto.Size(x.FloatList) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case *ValueList_BoolList: - s := proto.Size(x.BoolList) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case *ValueList_TimestampList: - s := proto.Size(x.TimestampList) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case nil: - default: - panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) - } - return n -} - -type BytesList struct { - Val [][]byte `protobuf:"bytes,1,rep,name=val,proto3" json:"val,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *BytesList) Reset() { *m = BytesList{} } -func (m *BytesList) String() 
string { return proto.CompactTextString(m) } -func (*BytesList) ProtoMessage() {} -func (*BytesList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{3} -} -func (m *BytesList) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_BytesList.Unmarshal(m, b) -} -func (m *BytesList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_BytesList.Marshal(b, m, deterministic) -} -func (dst *BytesList) XXX_Merge(src proto.Message) { - xxx_messageInfo_BytesList.Merge(dst, src) -} -func (m *BytesList) XXX_Size() int { - return xxx_messageInfo_BytesList.Size(m) -} -func (m *BytesList) XXX_DiscardUnknown() { - xxx_messageInfo_BytesList.DiscardUnknown(m) -} - -var xxx_messageInfo_BytesList proto.InternalMessageInfo - -func (m *BytesList) GetVal() [][]byte { - if m != nil { - return m.Val - } - return nil -} - -type StringList struct { - Val []string `protobuf:"bytes,1,rep,name=val,proto3" json:"val,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *StringList) Reset() { *m = StringList{} } -func (m *StringList) String() string { return proto.CompactTextString(m) } -func (*StringList) ProtoMessage() {} -func (*StringList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{4} -} -func (m *StringList) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_StringList.Unmarshal(m, b) -} -func (m *StringList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_StringList.Marshal(b, m, deterministic) -} -func (dst *StringList) XXX_Merge(src proto.Message) { - xxx_messageInfo_StringList.Merge(dst, src) -} -func (m *StringList) XXX_Size() int { - return xxx_messageInfo_StringList.Size(m) -} -func (m *StringList) XXX_DiscardUnknown() { - xxx_messageInfo_StringList.DiscardUnknown(m) -} - -var xxx_messageInfo_StringList proto.InternalMessageInfo - -func (m *StringList) GetVal() []string { - if m != nil { - return m.Val - } - return nil -} - -type Int32List struct { - Val []int32 `protobuf:"varint,1,rep,packed,name=val,proto3" json:"val,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *Int32List) Reset() { *m = Int32List{} } -func (m *Int32List) String() string { return proto.CompactTextString(m) } -func (*Int32List) ProtoMessage() {} -func (*Int32List) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{5} -} -func (m *Int32List) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Int32List.Unmarshal(m, b) -} -func (m *Int32List) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Int32List.Marshal(b, m, deterministic) -} -func (dst *Int32List) XXX_Merge(src proto.Message) { - xxx_messageInfo_Int32List.Merge(dst, src) -} -func (m *Int32List) XXX_Size() int { - return xxx_messageInfo_Int32List.Size(m) -} -func (m *Int32List) XXX_DiscardUnknown() { - xxx_messageInfo_Int32List.DiscardUnknown(m) -} - -var xxx_messageInfo_Int32List proto.InternalMessageInfo - -func (m *Int32List) GetVal() []int32 { - if m != nil { - return m.Val - } - return nil -} - -type Int64List struct { - Val []int64 `protobuf:"varint,1,rep,packed,name=val,proto3" json:"val,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *Int64List) Reset() { *m = Int64List{} } -func (m 
*Int64List) String() string { return proto.CompactTextString(m) } -func (*Int64List) ProtoMessage() {} -func (*Int64List) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{6} -} -func (m *Int64List) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Int64List.Unmarshal(m, b) -} -func (m *Int64List) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Int64List.Marshal(b, m, deterministic) -} -func (dst *Int64List) XXX_Merge(src proto.Message) { - xxx_messageInfo_Int64List.Merge(dst, src) -} -func (m *Int64List) XXX_Size() int { - return xxx_messageInfo_Int64List.Size(m) -} -func (m *Int64List) XXX_DiscardUnknown() { - xxx_messageInfo_Int64List.DiscardUnknown(m) -} - -var xxx_messageInfo_Int64List proto.InternalMessageInfo - -func (m *Int64List) GetVal() []int64 { - if m != nil { - return m.Val - } - return nil -} - -type DoubleList struct { - Val []float64 `protobuf:"fixed64,1,rep,packed,name=val,proto3" json:"val,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *DoubleList) Reset() { *m = DoubleList{} } -func (m *DoubleList) String() string { return proto.CompactTextString(m) } -func (*DoubleList) ProtoMessage() {} -func (*DoubleList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{7} -} -func (m *DoubleList) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_DoubleList.Unmarshal(m, b) -} -func (m *DoubleList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_DoubleList.Marshal(b, m, deterministic) -} -func (dst *DoubleList) XXX_Merge(src proto.Message) { - xxx_messageInfo_DoubleList.Merge(dst, src) -} -func (m *DoubleList) XXX_Size() int { - return xxx_messageInfo_DoubleList.Size(m) -} -func (m *DoubleList) XXX_DiscardUnknown() { - xxx_messageInfo_DoubleList.DiscardUnknown(m) -} - -var xxx_messageInfo_DoubleList proto.InternalMessageInfo - -func (m *DoubleList) GetVal() []float64 { - if m != nil { - return m.Val - } - return nil -} - -type FloatList struct { - Val []float32 `protobuf:"fixed32,1,rep,packed,name=val,proto3" json:"val,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *FloatList) Reset() { *m = FloatList{} } -func (m *FloatList) String() string { return proto.CompactTextString(m) } -func (*FloatList) ProtoMessage() {} -func (*FloatList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{8} -} -func (m *FloatList) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_FloatList.Unmarshal(m, b) -} -func (m *FloatList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_FloatList.Marshal(b, m, deterministic) -} -func (dst *FloatList) XXX_Merge(src proto.Message) { - xxx_messageInfo_FloatList.Merge(dst, src) -} -func (m *FloatList) XXX_Size() int { - return xxx_messageInfo_FloatList.Size(m) -} -func (m *FloatList) XXX_DiscardUnknown() { - xxx_messageInfo_FloatList.DiscardUnknown(m) -} - -var xxx_messageInfo_FloatList proto.InternalMessageInfo - -func (m *FloatList) GetVal() []float32 { - if m != nil { - return m.Val - } - return nil -} - -type BoolList struct { - Val []bool `protobuf:"varint,1,rep,packed,name=val,proto3" json:"val,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *BoolList) Reset() { 
*m = BoolList{} } -func (m *BoolList) String() string { return proto.CompactTextString(m) } -func (*BoolList) ProtoMessage() {} -func (*BoolList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{9} -} -func (m *BoolList) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_BoolList.Unmarshal(m, b) -} -func (m *BoolList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_BoolList.Marshal(b, m, deterministic) -} -func (dst *BoolList) XXX_Merge(src proto.Message) { - xxx_messageInfo_BoolList.Merge(dst, src) -} -func (m *BoolList) XXX_Size() int { - return xxx_messageInfo_BoolList.Size(m) -} -func (m *BoolList) XXX_DiscardUnknown() { - xxx_messageInfo_BoolList.DiscardUnknown(m) -} - -var xxx_messageInfo_BoolList proto.InternalMessageInfo - -func (m *BoolList) GetVal() []bool { - if m != nil { - return m.Val - } - return nil -} - -type TimestampList struct { - Val []*timestamp.Timestamp `protobuf:"bytes,1,rep,name=val,proto3" json:"val,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *TimestampList) Reset() { *m = TimestampList{} } -func (m *TimestampList) String() string { return proto.CompactTextString(m) } -func (*TimestampList) ProtoMessage() {} -func (*TimestampList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{10} -} -func (m *TimestampList) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_TimestampList.Unmarshal(m, b) -} -func (m *TimestampList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_TimestampList.Marshal(b, m, deterministic) -} -func (dst *TimestampList) XXX_Merge(src proto.Message) { - xxx_messageInfo_TimestampList.Merge(dst, src) -} -func (m *TimestampList) XXX_Size() int { - return xxx_messageInfo_TimestampList.Size(m) -} -func (m *TimestampList) XXX_DiscardUnknown() { - xxx_messageInfo_TimestampList.DiscardUnknown(m) -} - -var xxx_messageInfo_TimestampList proto.InternalMessageInfo - -func (m *TimestampList) GetVal() []*timestamp.Timestamp { - if m != nil { - return m.Val - } - return nil -} - -func init() { - proto.RegisterType((*ValueType)(nil), "feast.types.ValueType") - proto.RegisterType((*Value)(nil), "feast.types.Value") - proto.RegisterType((*ValueList)(nil), "feast.types.ValueList") - proto.RegisterType((*BytesList)(nil), "feast.types.BytesList") - proto.RegisterType((*StringList)(nil), "feast.types.StringList") - proto.RegisterType((*Int32List)(nil), "feast.types.Int32List") - proto.RegisterType((*Int64List)(nil), "feast.types.Int64List") - proto.RegisterType((*DoubleList)(nil), "feast.types.DoubleList") - proto.RegisterType((*FloatList)(nil), "feast.types.FloatList") - proto.RegisterType((*BoolList)(nil), "feast.types.BoolList") - proto.RegisterType((*TimestampList)(nil), "feast.types.TimestampList") - proto.RegisterEnum("feast.types.ValueType_Enum", ValueType_Enum_name, ValueType_Enum_value) -} - -func init() { proto.RegisterFile("feast/types/Value.proto", fileDescriptor_Value_0680a2f024df1112) } - -var fileDescriptor_Value_0680a2f024df1112 = []byte{ - // 626 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x94, 0xd1, 0x6f, 0x9a, 0x50, - 0x14, 0xc6, 0xb9, 0x22, 0x0a, 0xc7, 0x36, 0x21, 0x37, 0xd9, 0xda, 0x34, 0x6d, 0x47, 0x7c, 0xe2, - 0x61, 0x81, 0x44, 0x1b, 0x93, 0x3d, 0x2c, 0x59, 0x49, 0xed, 0x34, 0xb3, 0xda, 0x21, 0xed, 0xb2, - 0xbd, 0xc1, 0x4a, 0x99, 0x1b, 
0x8a, 0x29, 0xd8, 0xa4, 0x0f, 0xfb, 0x6f, 0xf6, 0xbf, 0xed, 0xdf, - 0x58, 0xce, 0x01, 0xae, 0xd7, 0xc4, 0xec, 0x4d, 0xce, 0xef, 0xfb, 0xee, 0xc1, 0xef, 0x03, 0xe0, - 0xe8, 0x31, 0x0e, 0xf3, 0xc2, 0x2d, 0x5e, 0xd6, 0x71, 0xee, 0xde, 0x87, 0xe9, 0x26, 0x76, 0xd6, - 0x4f, 0x59, 0x91, 0xf1, 0x0e, 0x01, 0x87, 0xc0, 0xc9, 0x9b, 0x24, 0xcb, 0x92, 0x34, 0x76, 0x09, - 0x45, 0x9b, 0x47, 0xb7, 0x58, 0x2c, 0xe3, 0xbc, 0x08, 0x97, 0xeb, 0x52, 0xdd, 0xfd, 0x0d, 0x06, - 0x99, 0x83, 0x97, 0x75, 0xdc, 0x5d, 0x43, 0x73, 0xb8, 0xda, 0x2c, 0x79, 0x07, 0xda, 0x77, 0xd3, - 0x4f, 0xd3, 0xd9, 0x97, 0xa9, 0xa9, 0x70, 0x03, 0x34, 0xef, 0x6b, 0x30, 0x9c, 0x9b, 0x8c, 0x03, - 0xb4, 0xe6, 0x81, 0x3f, 0x9e, 0x7e, 0x34, 0x1b, 0x38, 0x1e, 0x4f, 0x83, 0x7e, 0xcf, 0x54, 0xab, - 0x9f, 0x83, 0x0b, 0xb3, 0x89, 0x8a, 0xab, 0xd9, 0x9d, 0x37, 0x19, 0x9a, 0x1a, 0x8e, 0xaf, 0x27, - 0xb3, 0xcb, 0xc0, 0x6c, 0x71, 0x1d, 0x9a, 0xde, 0x6c, 0x36, 0x31, 0xdb, 0xfc, 0x10, 0x8c, 0x60, - 0x7c, 0x33, 0x9c, 0x07, 0x97, 0x37, 0xb7, 0xa6, 0xde, 0xfd, 0xd3, 0x00, 0x8d, 0xf6, 0xf3, 0x53, - 0xd0, 0xa3, 0x97, 0x22, 0xce, 0xef, 0xc3, 0xf4, 0x98, 0x59, 0xcc, 0x3e, 0x18, 0x29, 0xbe, 0x98, - 0xf0, 0x73, 0x30, 0xf2, 0xe2, 0x69, 0xb1, 0x4a, 0x10, 0x37, 0x2c, 0x66, 0x1b, 0x23, 0xc5, 0xdf, - 0x8e, 0xd0, 0xbd, 0x58, 0x15, 0xfd, 0x1e, 0x62, 0xd5, 0x62, 0xb6, 0x86, 0xee, 0x7a, 0x52, 0xd1, - 0xc1, 0x05, 0xd2, 0xa6, 0xc5, 0x6c, 0xb5, 0xa2, 0x34, 0xc1, 0xb3, 0x1f, 0xb2, 0x4d, 0x94, 0xc6, - 0x88, 0x35, 0x8b, 0xd9, 0x0c, 0xcf, 0x16, 0x23, 0x74, 0x3f, 0xa6, 0x59, 0x58, 0x20, 0x6e, 0x59, - 0xcc, 0x6e, 0xa0, 0xbb, 0x9e, 0xf0, 0x13, 0x68, 0x47, 0x59, 0x96, 0x22, 0x6c, 0x5b, 0xcc, 0xd6, - 0x47, 0x8a, 0x5f, 0x0f, 0xf8, 0x07, 0x38, 0x10, 0x79, 0xa3, 0x40, 0xb7, 0x98, 0xdd, 0xe9, 0x9d, - 0x38, 0x65, 0x29, 0x4e, 0x5d, 0x8a, 0x13, 0xd4, 0xa2, 0x91, 0xe2, 0xef, 0x38, 0x3c, 0x0d, 0xd4, - 0xe7, 0x30, 0xed, 0xfe, 0x55, 0xab, 0x9a, 0x26, 0x8b, 0xbc, 0xe0, 0x03, 0x30, 0x28, 0x18, 0xbc, - 0xa0, 0xac, 0x3a, 0xbd, 0xd7, 0x8e, 0xd4, 0xba, 0xe3, 0xd5, 0x14, 0xff, 0x88, 0x90, 0xf2, 0x77, - 0x00, 0x65, 0x62, 0x64, 0x6c, 0x90, 0xf1, 0x68, 0xc7, 0x38, 0x17, 0x78, 0xa4, 0xf8, 0x92, 0x18, - 0x57, 0x52, 0x9a, 0xe4, 0x54, 0xf7, 0xac, 0x1c, 0xd7, 0x14, 0x57, 0x0a, 0x69, 0xe5, 0x1b, 0x5c, - 0x90, 0xaf, 0xb9, 0xdf, 0x57, 0xd2, 0xca, 0x57, 0x5e, 0xe0, 0xad, 0x96, 0x05, 0x90, 0x51, 0xdb, - 0x73, 0xab, 0x57, 0x02, 0xe3, 0xad, 0x6e, 0xc5, 0xb8, 0x92, 0xca, 0x21, 0x67, 0x6b, 0xcf, 0xca, - 0xeb, 0x9a, 0xe2, 0x4a, 0x21, 0xe5, 0x7d, 0xd0, 0xb1, 0x37, 0xb2, 0xb5, 0xc9, 0xf6, 0x6a, 0x37, - 0xd4, 0x0a, 0xd2, 0x73, 0x59, 0xfd, 0xe6, 0x1e, 0x1c, 0x8a, 0xbe, 0xc8, 0x59, 0x57, 0x2c, 0x3b, - 0x03, 0x59, 0x31, 0x52, 0xfc, 0x5d, 0x8b, 0xd7, 0x01, 0xe3, 0xb9, 0xee, 0xb6, 0x7b, 0x06, 0x86, - 0x68, 0x8f, 0x9b, 0xd4, 0xfe, 0x31, 0xb3, 0x54, 0xfb, 0xc0, 0xa7, 0x07, 0xe1, 0x1c, 0x60, 0xdb, - 0x91, 0xcc, 0x8d, 0x92, 0x9f, 0x81, 0x21, 0x9a, 0x90, 0xb1, 0x26, 0xe3, 0x2a, 0x63, 0x09, 0xab, - 0xe2, 0xf4, 0x6d, 0xac, 0x32, 0x67, 0xc2, 0x2e, 0xc2, 0x93, 0x71, 0xa3, 0xc4, 0xa7, 0xa0, 0xd7, - 0x21, 0xc9, 0x54, 0x2f, 0xe9, 0x7b, 0x38, 0xdc, 0x09, 0x82, 0xbf, 0xdd, 0x4a, 0xfe, 0xfb, 0x52, - 0x90, 0xdd, 0xfb, 0x0c, 0xf2, 0x87, 0xcd, 0x03, 0x7a, 0x1d, 0x6e, 0x51, 0xfc, 0x6d, 0x90, 0x2c, - 0x8a, 0x1f, 0x9b, 0xc8, 0xf9, 0x9e, 0x2d, 0xdd, 0x24, 0xfb, 0x19, 0xff, 0x72, 0xcb, 0x6f, 0x23, - 0x1d, 0x95, 0xbb, 0x49, 0xbc, 0x8a, 0x9f, 0xc2, 0x22, 0x7e, 0x70, 0x93, 0xcc, 0x95, 0xbe, 0x9a, - 0x51, 0x8b, 0x04, 0xfd, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x5d, 0xd4, 0x8d, 0x2c, 0x4b, 0x05, - 0x00, 0x00, -} diff --git a/rfcs/0000-template.md 
b/rfcs/0000-template.md deleted file mode 100644 index cf9e06a726b..00000000000 --- a/rfcs/0000-template.md +++ /dev/null @@ -1,86 +0,0 @@ -- Feature Name: (fill me in with a unique ident, my_awesome_feature) -- Created Date: (fill me in with today's date, YYYY-MM-DD) -- RFC PR: (leave this empty) -- Feast Issue: (leave this empty) - -# Summary -[summary]: #summary - -One paragraph explanation of the feature. - -# Motivation -[motivation]: #motivation - -Why are we doing this? What use cases does it support? What is the expected outcome? - -# Guide-level explanation -[guide-level-explanation]: #guide-level-explanation - -Explain the proposal as if the change was already included in Feast and you are teaching it to a Feast user. That generally means: - -- Introducing new named concepts. -- Explaining the feature largely in terms of examples. -- Explain how users should think about the feature, and how it should impact the way they use Feast. Explain the impact as concretely as possible. - -# Reference-level explanation -[reference-level-explanation]: #reference-level-explanation - -This is the technical portion of the RFC. Explain the design in sufficient detail that: - -- Its interaction with other features is clear. -- It is reasonably clear how the feature would be implemented. -- Corner cases are dissected by example. - -The section should return to the examples given in the previous section, and explain more fully how the detailed proposal makes those examples work. - -# Drawbacks -[drawbacks]: #drawbacks - -Why should we *not* do this? - -# Rationale and alternatives -[rationale-and-alternatives]: #rationale-and-alternatives - -- Why is this design the best in the space of possible designs? -- What other designs have been considered and what is the rationale for not choosing them? -- What is the impact of not doing this? - -# Prior art -[prior-art]: #prior-art - -Discuss prior art, both the good and the bad, in relation to this proposal. -A few examples of what this can include are: - -- Does this feature exist in other software(s) and what experience has their community had? -- For community proposals: Is this done by some other community and what were their experiences with it? -- For other teams: What lessons can we learn from what other communities have done here? - -This section is intended to encourage you as an author to think about the lessons from other projects, provide readers of your RFC with a fuller picture. If there is no prior art, that is fine - your ideas are interesting to us whether they are brand new or if it is an adaptation. - -# Unresolved questions -[unresolved-questions]: #unresolved-questions - -- What parts of the design do you expect to resolve through the RFC process before this gets merged? -- What parts of the design do you expect to resolve through the implementation of this feature before stabilization? -- What related issues do you consider out of scope for this RFC that could be addressed in the future independently of the solution that comes out of this RFC? - -# Future possibilities -[future-possibilities]: #future-possibilities - -Think about what the natural extension and evolution of your proposal would -be and how it would affect the project as a whole in a holistic -way. Try to use this section as a tool to more fully consider all possible -interactions with the project in your proposal. -Also consider how the this all fits into the roadmap for the project -and of the relevant sub-team. 
- -This is also a good place to "dump ideas", if they are out of scope for the -RFC you are writing but otherwise related. - -If you have tried and cannot think of any future possibilities, -you may simply state that you cannot think of anything. - -Note that having something written down in the future-possibilities section -is not a reason to accept the current or a future RFC; such notes should be -in the section on motivation or rationale in this or subsequent RFCs. -The section merely provides additional information. \ No newline at end of file diff --git a/rfcs/0001-python-sdk.md b/rfcs/0001-python-sdk.md deleted file mode 100644 index 0875a6d2956..00000000000 --- a/rfcs/0001-python-sdk.md +++ /dev/null @@ -1,566 +0,0 @@ -- Feature Name: python_sdk -- Created Date: 2018-12-24 -- RFC PR: [#47](https://github.com/gojek/feast/pull/47) -- Feast Issue: [#48](https://github.com/gojek/feast/issues/48) - -# Summary -[summary]: #summary - -The Feast Python SDK is a library used to interact with Feast from Python. It aims to allow users to manage features and entities, generate and register specifications, as well as load or retrieve feature data. - -# Motivation -[motivation]: #motivation - -Python is one of the primary ways in which data scientists explore and develop data and models. Even though Feast is intended to be used by data scientists in their projects, there is currently no way to do this from within a Python development environment. It would be very useful to have a way to interact with Feast from Python: - -* Python is typically used for exploration and prototyping at the start of ML projects, which is also where a feature store is needed. Allowing users to use Feast without leaving the their development environment would be very helpful. -* Most model training and evaluation steps are executed from within Python. By having Feast available as a Python library, we would be able to simply extend the steps before training to retreive feature data. -* ML practitioners are generally more comfortable with Python than they are with Bash and other programming languages. -* The adoption and readability makes producing Feast examples a lot easier. It would be possible to simply insert Feast code into existing Python notebooks to showcase its usage. - -# Guide-level explanation -[guide-level-explanation]: #guide-level-explanation - -The Feast Python SDK allows you to take the following actions - -* Connect to an existing Feast deployment -* Create & register entities, features, imports, and data stores -* Generate import specifications from BigQuery tables or Pandas dataframes -* Start import jobs from import specifications to load data into Feast -* Export objects into specification format for re-use -* Retrieve training data from Feast for training models -* Retrieve serving data from Feast for inference or other use cases - -## Python SDK examples - -What follows is an example of how the Python SDK can be used by the end user. 
- -### Connect to your Feast server - -```python -fs = client('my.feast.server') -``` - -### Create new customer entity - -```python -customer_entity = Entity('customer', "desc", tags=["loyal", "customer"]) -``` - -### Create customer features - -```python - -customer_age = Feature(name='age', - entity="customer", - owner='user@website.com', - description="Customer's age", - value_type=ValueType.INT64, - serving_store=Datastore(id="REDIS1"), - warehouse_store=Datastore(id="BIGQUERY1")) - -customer_balance = Feature(name='balance', - entity="customer", - owner='user@website.com', - value_type=ValueType.FLOAT, - description="Customer's account balance", - serving_store=Datastore(id="REDIS1"), - warehouse_store=Datastore(id="BIGQUERY1")) -``` - -### Register customer entity in Feast using the apply method - -```python -fs.apply(customer_entity) -``` - -The apply method is idempotent. It allows you to submit one or more resources to Feast. If the command is rerun, Feast will apply only the change. - -### Register customer's "age" feature in Feast using the apply method - -```python -fs.apply(customer_age) -``` - -### Register multiple customer features - -```python -fs.apply([customer_age, customer_balance]) -``` - -### Register customer's "age" feature in Feast with "create" command - -```python -fs.create(customer_age) -``` - -If the user uses the `create` method and the feature `customer_age` already exists, then failure will occur. `create` is identical to `apply`, except that it does not allow resources to already exist within Feast. - -### Create an importer from a csv -Importers allow for the creation of jobs to ingest feature data into Feast. Importers are generally (but not necessarily) created from existing data sets (BQ, CSV, Pandas dataframe) in order to discover feature names. - -```python -cust_importer = Importer.from_csv('customer_features.csv', - entity='customer', - owner='user@website.com', - staging_location="gs://my-bucket/feast", - id_column="customer_id", - timestamp_value=datetime.datetime.now()) -``` - -### [Alternative] Create an importer from a BigQuery table - -```python -driver_importer_from_bq = Importer.from_bq("my_project.dataset1.table1", - entity="s2id", - owner='user@website.com', - timestamp_column="start_time") -``` - -### Describe the importer - -```python -driver_importer_from_bq.describe() -for feat in driver_importer_from_bq.features: - print(feat) -``` - -Output: -```markdown -type: bigquery -options: - dataset: dataset1 - project: my_project - table: table1 -entities: -- s2id -schema: - entityIdColumn: s2id - fields: - - name: start_time - - name: s2id - - featureId: s2id.surge_factor - name: surge_factor - timestampColumn: start_time - -id: s2id.surge_factor -name: surge_factor -owner: user@website.com -valueType: DOUBLE -entity: s2id -dataStores: {} -``` - -### [Alternative] Create an importer from a Pandas dataframe - -```python -my_pandas_df = pd.read_csv("driver_features.csv") -driver_importer_from_df = Importer.from_df(my_pandas_df, - entity='driver', - owner='user@website.com', - staging_location="gs://staging-bucket/feast", - id_column="driver_id", - timestamp_column="ts") -``` - -### Preview the dataframe loaded by the importer - -The `head` method prints out the first 5 rows of the Pandas dataframe. 
- -```python -cust_importer.df.head() -``` - -### Change the local file path in the importer to GCS - -In order to ingest data from a bucket, it is necessary to modify the importer's path configuration to the location of the file that needs to be imported. - -```python -driver_importer.options.path = 'gs://my-bucket-location/driver_features.csv' -``` - -### Submit the import job - -The import job loads the CSV from GCS into Feast. It automatically registers entities and features with Feast during submission. - -```python -fs.run(cust_importer, name_override="myjob") -``` -_starting import myjob1545303576252..._ -_10%_ -_50%_ -_100%_ -_1000 feature rows imported successfully_ - -### Write out specification files for later use - -```python -cust_importer.dump("driver_feature_import.yaml") -customer_entity.dump("customer_entity.yaml") -customer_age.dump("customer_entity.yaml") -``` - -### Create a “feature set” which can be used to query both training data and serving data. - -The feature set is simply an object that locally tracks which entity, and features you are interested in. - -```python -driver_feature_set = FeatureSet(entity='driver', features=['latitude', 'longitude', 'event_time']) -``` - -### Produce training dataset - -* Stages a table in BQ with output data -* Returns information about the dataset that has been created - -```python -driver_dataset_info = fs.create_training_dataset( - driver_feature_set, - start_date='2018-01-01', - end_date='2018-02-01') -``` - -### Retrieve training dataset using CSV - -```python -file_path = 'driver_features.csv' -fs.download_dataset(driver_dataset_info, dest=file_path, type='csv') -``` - -### Load training dataset into Pandas - -```python -import pandas as pd -df = pd.read_csv(file_path) -``` - -### [Alternative] Download dataset directly into a Pandas dataframe - -```python -driver_df = fs.download_dataset_to_df(driver_dataset_info) -``` - -### Train model -The user can now split their dataset and train their model. This is out of scope for the SDK. - -### Running inference -Ensure you have the list of entity keys for which you want to retrieve features - -```python -keys = [12345, 67890] -``` - -Fetch serving data from Feast by reusing the same feature set - -```python -feature_data = fs.get_serving_data(driver_feature_set, keys, type='last') -``` - -``` -output = model.predict(feature_data) -``` - -## Impact - -From a user's perspective, the SDK allows them to stay within Python to execute most of the commands that they would need to in order to manage Feast. -* Users can define features and load data from their data processing pipelines -* Users can retrieve features for training their models in their model training pipelines -* Users can quickly query Feast for serving feature data to validate whether serving features are consistent with training. -* Users can programmatically interact with Feast from within Python, which could would be useful for managing entities, features, imports, or interacting with existing feature data. -* Overall the goal is that time to market decreases, because users are able to accelerate their development loop. - -# Reference-level explanation -[reference-level-explanation]: #reference-level-explanation - -Users of the SDK will use the following classes when interacting with Feast - -__Resource classes__ - -These classes allow users to describe and register their resources (entities, features, data stores) with Feast. - -* __Entity__: Represents an entity within Feast. 
-* __Feature__: Represents a single feature within Feast.
-* __FeatureSet__: Represents a grouping of features within Feast (for retrieval).
-* __Storage__: Represents a data store that Feast will use to store features.
-
-__Management classes__
-
-These classes allow users to manage resources and jobs.
-
-* __Importer__: Used to load feature data into Feast.
-* __Client__: The Feast client is the means by which resources, jobs, and data are managed.
-
-This reference-level explanation describes some of the important underlying classes and methods of the SDK in more detail.
-
-### Class: Entity
-
-__Method__: `__init__`
-
-Initializes an `entity` during instantiation.
-
-```
-Args:
-    name (str): name of entity
-    description (str): description of entity
-    tags (list[str], optional): defaults to [].
-        list of tags for this entity
-```
-
-__Method__: `from_yaml`
-
-Create an instance of an `entity` from a yaml file.
-```
-Args:
-    path (str): path to yaml file
-```
-
-__Method__: `dump`
-
-Export the contents of this `entity` to a yaml file.
-
-```
-Args:
-    path (str): destination file path
-```
-### Class: Feature
-
-__Method__: `__init__`
-
-Initializes a `feature` during instantiation.
-
-```
-Args:
-    name (str): name of feature, in lower snake case
-    entity (str): entity the feature belongs to, in lower case
-    owner (str): owner of the feature
-    value_type (feast.types.ValueType_pb2.ValueType): defaults to ValueType.DOUBLE. value type of the feature
-    description (str): defaults to "". description of the feature
-    uri (str): defaults to "". uri pointing to the source code or origin of this feature
-    warehouse_store (feast.specs.FeatureSpec_pb2.Datastore): warehouse store id and options
-    serving_store (feast.specs.FeatureSpec_pb2.Datastore): serving store id and options
-    group (str, optional): feature group to inherit from
-    tags (list[str], optional): tags assigned to the feature
-    options (dict, optional): additional options for the feature
-```
-
-__Method__: `from_yaml`
-
-Create an instance of a `feature` from a yaml file.
-```
-Args:
-    path (str): path to yaml file
-```
-
-__Method__: `dump`
-
-Export the contents of this `feature` to a yaml file.
-
-```
-Args:
-    path (str): destination file path
-```
-
-### Class: Importer
-
-__Method__: `from_csv`
-
-Creates an `importer` from a given CSV dataset. This file can be either local or remote (in GCS). If it is a local file, then `staging_location` must be specified.
-
-```
-Args:
-    path (str): path to csv file
-    entity (str): entity id
-    owner (str): owner
-    staging_location (str, optional): Defaults to None. Staging location for ingesting a local csv file.
-    id_column (str, optional): Defaults to None. Id column in the csv. If not set, will default to the `entity` argument.
-    feature_columns ([str], optional): Defaults to None. Feature columns to ingest. If not set, the importer will by default ingest all available columns.
-    timestamp_column (str, optional): Defaults to None. Timestamp column in the csv. If not set, defaults to timestamp value.
-    timestamp_value (datetime, optional): Defaults to current datetime. Timestamp value to assign to all features in the dataset.
-
-Returns:
-    Importer: the importer for the dataset provided.
-```
-
-__Method__: `from_bq`
-
-Creates an `importer` from a given BigQuery table.
-
-```
-Args:
-    path (str): path to the BigQuery table, e.g. "my_project.dataset1.table1"
-    entity (str): entity id
-    owner (str): owner
-    staging_location (str, optional): Defaults to None. Staging location for ingesting a local csv file.
-    id_column (str, optional): Defaults to None. Id column in the table. If not set, will default to the `entity` argument.
-    feature_columns ([str], optional): Defaults to None. Feature columns to ingest. If not set, the importer will by default ingest all available columns.
-    timestamp_column (str, optional): Defaults to None. Timestamp column in the table. If not set, defaults to timestamp value.
-    timestamp_value (datetime, optional): Defaults to current datetime. Timestamp value to assign to all features in the dataset.
-
-Returns:
-    Importer: the importer for the dataset provided.
-```
-
-__Method__: `from_df`
-
-Creates an `importer` from a given Pandas dataframe. To import data from a dataframe, it must first be written to a staging location (see `staging_location`).
-
-```
-Args:
-    df (pandas.DataFrame): dataframe to import
-    entity (str): entity id
-    owner (str): owner
-    staging_location (str, optional): Defaults to None. Staging location used to stage the dataframe before ingestion.
-    id_column (str, optional): Defaults to None. Id column in the dataframe. If not set, will default to the `entity` argument.
-    feature_columns ([str], optional): Defaults to None. Feature columns to ingest. If not set, the importer will by default ingest all available columns.
-    timestamp_column (str, optional): Defaults to None. Timestamp column in the dataframe. If not set, defaults to timestamp value.
-    timestamp_value (datetime, optional): Defaults to current datetime. Timestamp value to assign to all features in the dataset.
-
-Returns:
-    Importer: the importer for the dataset provided.
-```
-
-__Method__: `describe`
-
-Prints out the properties of the `importer` as an import specification.
-
-__Method__: `dump`
-
-Saves the `importer` as an import specification to the local file system.
-```
-Args:
-    path (str): path to dump the spec to
-```
-
-### Class: Client
-
-__Method__: `__init__`
-
-Initializes a Feast client instance which immediately connects to the Feast deployment specified. If no URL is provided, the client will default to the URL specified in the environment variable `FEAST_CORE_URL`.
-
-```
-Args:
-    core_url (str, optional): feast's grpc endpoint URL
-        (e.g.: "my.feast.com:8433")
-    serving_url (str, optional): feast serving's grpc endpoint URL
-        (e.g.: "my.feast.com:8433")
-```
-
-__Method__: `apply`
-
-Create or update one or more Feast resources (feature, entity, importer, storage). This method is idempotent. It can be rerun without any side-effects, as long as the user does not attempt to change immutable properties of resources.
-
-```
-Args:
-    obj (object): one or more feast resources
-```
-
-__Method__: `create`
-
-Create one or more Feast resources (feature, entity, importer, storage). This method will return an error if the resource already exists.
-
-```
-Args:
-    obj (object): one or more feast resources
-```
-
-__Method__: `run`
-
-Submits an Importer to Feast to start an import of feature data.
-
-```
-Args:
-    importer (object): Instance of an importer to run
-    name_override (str, optional): Set name of import Job
-    apply_entity (bool, optional): Run apply for entities in importer
-    apply_features (bool, optional): Run apply for all features in importer
-```
-
-__Method__: `close`
-
-Closes the connection to a Feast instance.
-
-__Method__: `get_serving_data`
-
-Retrieve data from the Feast serving layer. You can either retrieve the latest value, or a list of the latest values, up to a provided limit. If `serving_url` is not provided, the value stored in the environment variable FEAST_SERVING_URL is used to connect to the serving server instead.
-
-```
-Args:
-    feature_set (feast.sdk.resources.feature_set.FeatureSet): feature set representing the data wanted
-    entity_keys (:obj: `list` of :obj: `str`): list of entity keys
-    request_type (feast.sdk.utils.types.ServingRequestType):
-        (default: feast.sdk.utils.types.ServingRequestType.LAST) type of request: one of [LIST, LAST]
-    ts_range (:obj: `list` of :obj: `datetime.datetime`, optional): size 2 list of start timestamp and end timestamp. Only required if request_type is set to LIST
-    limit (int, optional): (default: 10) number of values to get. Only required if request_type is set to LIST
-
-Returns:
-    pandas.DataFrame: DataFrame of results
-```
-
-__Method__: `download_dataset_to_df`
-
-Downloads a dataset into a Pandas dataframe.
-
-```
-Args:
-    dataset (Feast.DataSet): An instance of the DataSet object which is returned from the `create_training_dataset` method.
-Returns:
-    pandas.DataFrame: DataFrame of feature data
-```
-
-__Method__: `download_dataset`
-
-Downloads a dataset into a local file.
-
-```
-Args:
-    dataset (Feast.DataSet): An instance of the DataSet object which is returned from the `create_training_dataset` method.
-    dest (str): location to store the file locally.
-    type: format to store dataset as (csv, avro)
-```
-
-# Drawbacks
-[drawbacks]: #drawbacks
-
-The primary drawback of using a programming language to interact with and manage Feast is that it allows users to produce code that is harder to understand than simple YAML configuration (which is what Feast uses currently). For example, users would be able to use Jinja templating to produce their Feast resources, which would decrease readability.
-
-# Rationale and alternatives
-[rationale-and-alternatives]: #rationale-and-alternatives
-
-Why is this design the best in the space of possible designs?
-
-The design contains a minimal set of functionality that already exists in Feast, mapped over into Python from the CLI. It does not introduce any new functionality, with the exception of "Feature Sets" and the retrieval of batch feature data. The SDK simply allows access to the functionality from Python.
-
-What other designs have been considered and what is the rationale for not choosing them?
-
-Other languages have been considered, but none compare to Python for this use case. This version of the SDK is a first cut, and much of the higher-order functionality like `feature sets` and `data sets` is left in a basic state.
-
-What is the impact of not doing this?
-
-* It would be very difficult to onboard new users onto Feast without having a Python example to show them.
-* Frustration and development time would increase if users were asked to interact with Feast from the command line or a user interface.
-* Project code would be less consistent and readable if multiple development languages were used.
-
-# Prior art
-[prior-art]: #prior-art
-
-No prior art was found for this RFC.
-
-# Unresolved questions
-[unresolved-questions]: #unresolved-questions
-
-What parts of the design do you expect to resolve through the RFC process before this gets merged?
-
-* The handling of feature sets, data sets, job execution, and arguments used for methods.
-
-What parts of the design do you expect to resolve through the implementation of this feature before stabilization?
-
-* N/A
-
-What related issues do you consider out of scope for this RFC that could be addressed in the future independently of the solution that comes out of this RFC?
-
-* N/A
-
-# Future possibilities
-[future-possibilities]: #future-possibilities
-
-The Feast Python SDK is a natural iteration in making Feast more user-friendly. The first version will only have a minimal API, but the SDK can become the base for adding new functionality to Feast.
diff --git a/rfcs/README.md b/rfcs/README.md
deleted file mode 100644
index f5735e7a8a4..00000000000
--- a/rfcs/README.md
+++ /dev/null
@@ -1,42 +0,0 @@
-# Feast RFCs
-
-[Feast RFCs]: #feast-rfcs
-
-Many changes, including bug fixes and documentation improvements, can be
-implemented and reviewed via the normal GitHub pull request workflow.
-However, any substantial changes should be put through a design process and
-produce a consensus.
-
-The "RFC" (request for comments) process is intended to provide a consistent
-and controlled path for new features to enter the project.
-
-## What the process is
-[What the process is]: #what-the-process-is
-
-In short, to get a major feature added to Feast, one must first get the RFC
-merged into the RFC repository as a markdown file. At that point the RFC is
-"active" and may be implemented with the goal of eventual inclusion into Feast.
-
- - Clone the `feast` repository.
- - Copy `rfc/0000-template.md` to `rfc/0000-my-feature.md` (where "my-feature" is
-   descriptive; don't assign an RFC number yet).
- - Fill in the RFC. Put care into the details: RFCs that do not present
-   convincing motivation, demonstrate understanding of the impact of the
-   design, or are disingenuous about the drawbacks or alternatives tend to be
-   poorly-received.
- - Submit a pull request. As a pull request the RFC will receive design
-   feedback from the larger community, and the author should be prepared to
-   revise it in response.
- - Build consensus and integrate feedback. RFCs that have broad support are
-   much more likely to make progress than those that don't receive any
-   comments. Feel free to reach out to the RFC assignee in particular to get
-   help identifying stakeholders and obstacles.
- - The core team will discuss the RFC pull request, as much as possible in the
-   comment thread of the pull request itself. Offline discussion will be
-   summarized on the pull request comment thread.
- - Once the consensus is reached, the approvers will merge the pull request,
-   after which the implementation phase begins.
-
-## Attribution
-
-This process and template are based on [Rust RFCs](https://github.com/rust-lang/rfcs).
\ No newline at end of file
diff --git a/sdk/go/client.go b/sdk/go/client.go
new file mode 100644
index 00000000000..0eaf89b04a2
--- /dev/null
+++ b/sdk/go/client.go
@@ -0,0 +1,68 @@
+package feast
+
+import (
+	"context"
+	"fmt"
+	"github.com/opentracing/opentracing-go"
+
+	"github.com/gojek/feast/sdk/go/protos/feast/serving"
+	"google.golang.org/grpc"
+
+	"go.opencensus.io/plugin/ocgrpc"
+)
+
+// Client is a feast serving client.
+type Client interface {
+	GetOnlineFeatures(ctx context.Context, req *OnlineFeaturesRequest) (*OnlineFeaturesResponse, error)
+	GetFeastServingInfo(ctx context.Context, in *serving.GetFeastServingInfoRequest) (*serving.GetFeastServingInfoResponse, error)
+	Close() error
+}
+
+// GrpcClient is a grpc client for feast serving.
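+//
+// A minimal usage sketch (the host and port below are placeholders, not defaults):
+//
+//	cli, err := NewGrpcClient("localhost", 6566)
+//	if err != nil {
+//		// handle the connection error
+//	}
+//	defer cli.Close()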
+type GrpcClient struct { + cli serving.ServingServiceClient + conn *grpc.ClientConn +} + +// NewGrpcClient constructs a client that can interact via grpc with the feast serving instance at the given host:port. +func NewGrpcClient(host string, port int) (*GrpcClient, error) { + feastCli := &GrpcClient{} + + adr := fmt.Sprintf("%s:%d", host, port) + conn, err := grpc.Dial(adr, grpc.WithStatsHandler(&ocgrpc.ClientHandler{}), grpc.WithInsecure()) + if err != nil { + return nil, err + } + feastCli.cli = serving.NewServingServiceClient(conn) + feastCli.conn = conn + return feastCli, nil +} + +// GetOnlineFeatures gets the latest values of the request features from the Feast serving instance provided. +func (fc *GrpcClient) GetOnlineFeatures(ctx context.Context, req *OnlineFeaturesRequest) ( + *OnlineFeaturesResponse, error) { + span, ctx := opentracing.StartSpanFromContext(ctx, "get_online_features") + defer span.Finish() + + featuresRequest, err := req.buildRequest() + if err != nil { + return nil, err + } + resp, err := fc.cli.GetOnlineFeatures(ctx, featuresRequest) + + return &OnlineFeaturesResponse{RawResponse: resp}, err +} + +// GetInfo gets information about the feast serving instance this client is connected to. +func (fc *GrpcClient) GetFeastServingInfo(ctx context.Context, in *serving.GetFeastServingInfoRequest) ( + *serving.GetFeastServingInfoResponse, error) { + span, ctx := opentracing.StartSpanFromContext(ctx, "get_info") + defer span.Finish() + + return fc.cli.GetFeastServingInfo(ctx, in) +} + +// Closes the grpc connection. +func (fc *GrpcClient) Close() error { + return fc.conn.Close() +} \ No newline at end of file diff --git a/sdk/go/go.mod b/sdk/go/go.mod new file mode 100644 index 00000000000..7c029da1095 --- /dev/null +++ b/sdk/go/go.mod @@ -0,0 +1,12 @@ +module github.com/gojek/feast/sdk/go + +go 1.13 + +require ( + github.com/golang/protobuf v1.3.2 + github.com/google/go-cmp v0.3.0 + github.com/opentracing/opentracing-go v1.1.0 + github.com/stretchr/testify v1.4.0 // indirect + go.opencensus.io v0.22.1 + google.golang.org/grpc v1.24.0 +) diff --git a/sdk/go/go.sum b/sdk/go/go.sum new file mode 100644 index 00000000000..56df48673e5 --- /dev/null +++ b/sdk/go/go.sum @@ -0,0 +1,73 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6 h1:ZgQEtGgCBiWRM39fZuwSd1LwSqqSW0hOdXCYYDX0R3I= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs= +github.com/golang/protobuf v1.3.2/go.mod 
h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/opentracing/opentracing-go v1.1.0 h1:pWlfV3Bxv7k65HYwkikxat0+s3pV4bsqf19k25Ur8rU= +github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +go.opencensus.io v0.22.1 h1:8dP3SGL7MPB94crU3bEPplMPe83FI4EouesJUeFHv50= +go.opencensus.io v0.22.1/go.mod h1:Ap50jQcDJrx6rB6VgeeFPtuPIf3wMRvRfrfYDO6+BmA= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a h1:oWX7TPOiFAMXLq8o0ikBYfCJVlRHBcsciT5bXOrH628= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859 h1:R/3boaszxrf1GEUWTVDzSKVwLmSJpwZ1yqXm8j0v2QI= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd h1:r7DufRZuZbWB7j439YfAzP8RPDa9unLkpwQKUYbIMPI= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= +golang.org/x/text v0.3.0/go.mod 
h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8 h1:Nw54tB0rB7hY/N0NQvRW8DG4Yk3Q6T9cu9RcFQDu1tc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb h1:i1Ppqkc3WQXikh8bXiwHqAN5Rv3/qDCcRk0/Otx73BY= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.24.0 h1:vb/1TCsVn3DcJlQ0Gs1yB1pKI6Do2/QNwxdKqmc/b0s= +google.golang.org/grpc v1.24.0/go.mod h1:XDChyiUovWa60DnaeDeZmSW86xtLtjtZbwvSiRnRtcA= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/sdk/go/protos/feast/core/CoreService.pb.go b/sdk/go/protos/feast/core/CoreService.pb.go new file mode 100644 index 00000000000..e0af92433b7 --- /dev/null +++ b/sdk/go/protos/feast/core/CoreService.pb.go @@ -0,0 +1,1039 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: feast/core/CoreService.proto + +package core + +import ( + context "context" + fmt "fmt" + proto "github.com/golang/protobuf/proto" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type ApplyFeatureSetResponse_Status int32 + +const ( + // Latest feature set version is consistent with provided feature set + ApplyFeatureSetResponse_NO_CHANGE ApplyFeatureSetResponse_Status = 0 + // New feature set or feature set version created + ApplyFeatureSetResponse_CREATED ApplyFeatureSetResponse_Status = 1 + // Error occurred while trying to apply changes + ApplyFeatureSetResponse_ERROR ApplyFeatureSetResponse_Status = 2 +) + +var ApplyFeatureSetResponse_Status_name = map[int32]string{ + 0: "NO_CHANGE", + 1: "CREATED", + 2: "ERROR", +} + +var ApplyFeatureSetResponse_Status_value = map[string]int32{ + "NO_CHANGE": 0, + "CREATED": 1, + "ERROR": 2, +} + +func (x ApplyFeatureSetResponse_Status) String() string { + return proto.EnumName(ApplyFeatureSetResponse_Status_name, int32(x)) +} + +func (ApplyFeatureSetResponse_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{7, 0} +} + +type UpdateStoreResponse_Status int32 + +const ( + // Existing store config matching the given store id is identical to the given store config. + UpdateStoreResponse_NO_CHANGE UpdateStoreResponse_Status = 0 + // New store created or existing config updated. + UpdateStoreResponse_UPDATED UpdateStoreResponse_Status = 1 +) + +var UpdateStoreResponse_Status_name = map[int32]string{ + 0: "NO_CHANGE", + 1: "UPDATED", +} + +var UpdateStoreResponse_Status_value = map[string]int32{ + "NO_CHANGE": 0, + "UPDATED": 1, +} + +func (x UpdateStoreResponse_Status) String() string { + return proto.EnumName(UpdateStoreResponse_Status_name, int32(x)) +} + +func (UpdateStoreResponse_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{11, 0} +} + +// Request for a single feature set +type GetFeatureSetRequest struct { + // Name of feature set (required). + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Version of feature set (optional). If omitted then latest feature set will be returned. 
+ Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFeatureSetRequest) Reset() { *m = GetFeatureSetRequest{} } +func (m *GetFeatureSetRequest) String() string { return proto.CompactTextString(m) } +func (*GetFeatureSetRequest) ProtoMessage() {} +func (*GetFeatureSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{0} +} + +func (m *GetFeatureSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFeatureSetRequest.Unmarshal(m, b) +} +func (m *GetFeatureSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFeatureSetRequest.Marshal(b, m, deterministic) +} +func (m *GetFeatureSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFeatureSetRequest.Merge(m, src) +} +func (m *GetFeatureSetRequest) XXX_Size() int { + return xxx_messageInfo_GetFeatureSetRequest.Size(m) +} +func (m *GetFeatureSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetFeatureSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFeatureSetRequest proto.InternalMessageInfo + +func (m *GetFeatureSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetFeatureSetRequest) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +// Response containing a single feature set +type GetFeatureSetResponse struct { + FeatureSet *FeatureSetSpec `protobuf:"bytes,1,opt,name=feature_set,json=featureSet,proto3" json:"feature_set,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFeatureSetResponse) Reset() { *m = GetFeatureSetResponse{} } +func (m *GetFeatureSetResponse) String() string { return proto.CompactTextString(m) } +func (*GetFeatureSetResponse) ProtoMessage() {} +func (*GetFeatureSetResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{1} +} + +func (m *GetFeatureSetResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFeatureSetResponse.Unmarshal(m, b) +} +func (m *GetFeatureSetResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFeatureSetResponse.Marshal(b, m, deterministic) +} +func (m *GetFeatureSetResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFeatureSetResponse.Merge(m, src) +} +func (m *GetFeatureSetResponse) XXX_Size() int { + return xxx_messageInfo_GetFeatureSetResponse.Size(m) +} +func (m *GetFeatureSetResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetFeatureSetResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFeatureSetResponse proto.InternalMessageInfo + +func (m *GetFeatureSetResponse) GetFeatureSet() *FeatureSetSpec { + if m != nil { + return m.FeatureSet + } + return nil +} + +// Retrieves details for all versions of a specific feature set +type ListFeatureSetsRequest struct { + Filter *ListFeatureSetsRequest_Filter `protobuf:"bytes,1,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFeatureSetsRequest) Reset() { *m = ListFeatureSetsRequest{} } +func (m *ListFeatureSetsRequest) String() string { return proto.CompactTextString(m) } +func (*ListFeatureSetsRequest) ProtoMessage() {} +func (*ListFeatureSetsRequest) Descriptor() ([]byte, []int) 
{ + return fileDescriptor_d9be266444105411, []int{2} +} + +func (m *ListFeatureSetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFeatureSetsRequest.Unmarshal(m, b) +} +func (m *ListFeatureSetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFeatureSetsRequest.Marshal(b, m, deterministic) +} +func (m *ListFeatureSetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFeatureSetsRequest.Merge(m, src) +} +func (m *ListFeatureSetsRequest) XXX_Size() int { + return xxx_messageInfo_ListFeatureSetsRequest.Size(m) +} +func (m *ListFeatureSetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListFeatureSetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFeatureSetsRequest proto.InternalMessageInfo + +func (m *ListFeatureSetsRequest) GetFilter() *ListFeatureSetsRequest_Filter { + if m != nil { + return m.Filter + } + return nil +} + +type ListFeatureSetsRequest_Filter struct { + // Name of the desired feature set. Valid regex strings are allowed. + // e.g. + // - .* can be used to match all feature sets + // - my-project-.* can be used to match all features prefixed by "my-project" + FeatureSetName string `protobuf:"bytes,1,opt,name=feature_set_name,json=featureSetName,proto3" json:"feature_set_name,omitempty"` + // Version of the desired feature set. Either a number or valid expression can be provided. + // e.g. + // - 1 will match version 1 exactly + // - >=1 will match all versions greater or equal to 1 + // - <10 will match all versions less than 10 + FeatureSetVersion string `protobuf:"bytes,2,opt,name=feature_set_version,json=featureSetVersion,proto3" json:"feature_set_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFeatureSetsRequest_Filter) Reset() { *m = ListFeatureSetsRequest_Filter{} } +func (m *ListFeatureSetsRequest_Filter) String() string { return proto.CompactTextString(m) } +func (*ListFeatureSetsRequest_Filter) ProtoMessage() {} +func (*ListFeatureSetsRequest_Filter) Descriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{2, 0} +} + +func (m *ListFeatureSetsRequest_Filter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFeatureSetsRequest_Filter.Unmarshal(m, b) +} +func (m *ListFeatureSetsRequest_Filter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFeatureSetsRequest_Filter.Marshal(b, m, deterministic) +} +func (m *ListFeatureSetsRequest_Filter) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFeatureSetsRequest_Filter.Merge(m, src) +} +func (m *ListFeatureSetsRequest_Filter) XXX_Size() int { + return xxx_messageInfo_ListFeatureSetsRequest_Filter.Size(m) +} +func (m *ListFeatureSetsRequest_Filter) XXX_DiscardUnknown() { + xxx_messageInfo_ListFeatureSetsRequest_Filter.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFeatureSetsRequest_Filter proto.InternalMessageInfo + +func (m *ListFeatureSetsRequest_Filter) GetFeatureSetName() string { + if m != nil { + return m.FeatureSetName + } + return "" +} + +func (m *ListFeatureSetsRequest_Filter) GetFeatureSetVersion() string { + if m != nil { + return m.FeatureSetVersion + } + return "" +} + +type ListFeatureSetsResponse struct { + FeatureSets []*FeatureSetSpec `protobuf:"bytes,1,rep,name=feature_sets,json=featureSets,proto3" json:"feature_sets,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` 
+} + +func (m *ListFeatureSetsResponse) Reset() { *m = ListFeatureSetsResponse{} } +func (m *ListFeatureSetsResponse) String() string { return proto.CompactTextString(m) } +func (*ListFeatureSetsResponse) ProtoMessage() {} +func (*ListFeatureSetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{3} +} + +func (m *ListFeatureSetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFeatureSetsResponse.Unmarshal(m, b) +} +func (m *ListFeatureSetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFeatureSetsResponse.Marshal(b, m, deterministic) +} +func (m *ListFeatureSetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFeatureSetsResponse.Merge(m, src) +} +func (m *ListFeatureSetsResponse) XXX_Size() int { + return xxx_messageInfo_ListFeatureSetsResponse.Size(m) +} +func (m *ListFeatureSetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListFeatureSetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFeatureSetsResponse proto.InternalMessageInfo + +func (m *ListFeatureSetsResponse) GetFeatureSets() []*FeatureSetSpec { + if m != nil { + return m.FeatureSets + } + return nil +} + +type ListStoresRequest struct { + Filter *ListStoresRequest_Filter `protobuf:"bytes,1,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListStoresRequest) Reset() { *m = ListStoresRequest{} } +func (m *ListStoresRequest) String() string { return proto.CompactTextString(m) } +func (*ListStoresRequest) ProtoMessage() {} +func (*ListStoresRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{4} +} + +func (m *ListStoresRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListStoresRequest.Unmarshal(m, b) +} +func (m *ListStoresRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListStoresRequest.Marshal(b, m, deterministic) +} +func (m *ListStoresRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListStoresRequest.Merge(m, src) +} +func (m *ListStoresRequest) XXX_Size() int { + return xxx_messageInfo_ListStoresRequest.Size(m) +} +func (m *ListStoresRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListStoresRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListStoresRequest proto.InternalMessageInfo + +func (m *ListStoresRequest) GetFilter() *ListStoresRequest_Filter { + if m != nil { + return m.Filter + } + return nil +} + +type ListStoresRequest_Filter struct { + // Name of desired store. Regex is not supported in this query. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListStoresRequest_Filter) Reset() { *m = ListStoresRequest_Filter{} } +func (m *ListStoresRequest_Filter) String() string { return proto.CompactTextString(m) } +func (*ListStoresRequest_Filter) ProtoMessage() {} +func (*ListStoresRequest_Filter) Descriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{4, 0} +} + +func (m *ListStoresRequest_Filter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListStoresRequest_Filter.Unmarshal(m, b) +} +func (m *ListStoresRequest_Filter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListStoresRequest_Filter.Marshal(b, m, deterministic) +} +func (m *ListStoresRequest_Filter) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListStoresRequest_Filter.Merge(m, src) +} +func (m *ListStoresRequest_Filter) XXX_Size() int { + return xxx_messageInfo_ListStoresRequest_Filter.Size(m) +} +func (m *ListStoresRequest_Filter) XXX_DiscardUnknown() { + xxx_messageInfo_ListStoresRequest_Filter.DiscardUnknown(m) +} + +var xxx_messageInfo_ListStoresRequest_Filter proto.InternalMessageInfo + +func (m *ListStoresRequest_Filter) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type ListStoresResponse struct { + Store []*Store `protobuf:"bytes,1,rep,name=store,proto3" json:"store,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListStoresResponse) Reset() { *m = ListStoresResponse{} } +func (m *ListStoresResponse) String() string { return proto.CompactTextString(m) } +func (*ListStoresResponse) ProtoMessage() {} +func (*ListStoresResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{5} +} + +func (m *ListStoresResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListStoresResponse.Unmarshal(m, b) +} +func (m *ListStoresResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListStoresResponse.Marshal(b, m, deterministic) +} +func (m *ListStoresResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListStoresResponse.Merge(m, src) +} +func (m *ListStoresResponse) XXX_Size() int { + return xxx_messageInfo_ListStoresResponse.Size(m) +} +func (m *ListStoresResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListStoresResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListStoresResponse proto.InternalMessageInfo + +func (m *ListStoresResponse) GetStore() []*Store { + if m != nil { + return m.Store + } + return nil +} + +type ApplyFeatureSetRequest struct { + // Feature set version and source will be ignored + FeatureSet *FeatureSetSpec `protobuf:"bytes,1,opt,name=feature_set,json=featureSet,proto3" json:"feature_set,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApplyFeatureSetRequest) Reset() { *m = ApplyFeatureSetRequest{} } +func (m *ApplyFeatureSetRequest) String() string { return proto.CompactTextString(m) } +func (*ApplyFeatureSetRequest) ProtoMessage() {} +func (*ApplyFeatureSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{6} +} + +func (m *ApplyFeatureSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApplyFeatureSetRequest.Unmarshal(m, b) +} +func (m 
*ApplyFeatureSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApplyFeatureSetRequest.Marshal(b, m, deterministic) +} +func (m *ApplyFeatureSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApplyFeatureSetRequest.Merge(m, src) +} +func (m *ApplyFeatureSetRequest) XXX_Size() int { + return xxx_messageInfo_ApplyFeatureSetRequest.Size(m) +} +func (m *ApplyFeatureSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ApplyFeatureSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ApplyFeatureSetRequest proto.InternalMessageInfo + +func (m *ApplyFeatureSetRequest) GetFeatureSet() *FeatureSetSpec { + if m != nil { + return m.FeatureSet + } + return nil +} + +type ApplyFeatureSetResponse struct { + // Feature set response has been enriched with version and source information + FeatureSet *FeatureSetSpec `protobuf:"bytes,1,opt,name=feature_set,json=featureSet,proto3" json:"feature_set,omitempty"` + Status ApplyFeatureSetResponse_Status `protobuf:"varint,2,opt,name=status,proto3,enum=feast.core.ApplyFeatureSetResponse_Status" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApplyFeatureSetResponse) Reset() { *m = ApplyFeatureSetResponse{} } +func (m *ApplyFeatureSetResponse) String() string { return proto.CompactTextString(m) } +func (*ApplyFeatureSetResponse) ProtoMessage() {} +func (*ApplyFeatureSetResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{7} +} + +func (m *ApplyFeatureSetResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApplyFeatureSetResponse.Unmarshal(m, b) +} +func (m *ApplyFeatureSetResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApplyFeatureSetResponse.Marshal(b, m, deterministic) +} +func (m *ApplyFeatureSetResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApplyFeatureSetResponse.Merge(m, src) +} +func (m *ApplyFeatureSetResponse) XXX_Size() int { + return xxx_messageInfo_ApplyFeatureSetResponse.Size(m) +} +func (m *ApplyFeatureSetResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ApplyFeatureSetResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ApplyFeatureSetResponse proto.InternalMessageInfo + +func (m *ApplyFeatureSetResponse) GetFeatureSet() *FeatureSetSpec { + if m != nil { + return m.FeatureSet + } + return nil +} + +func (m *ApplyFeatureSetResponse) GetStatus() ApplyFeatureSetResponse_Status { + if m != nil { + return m.Status + } + return ApplyFeatureSetResponse_NO_CHANGE +} + +type GetFeastCoreVersionRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFeastCoreVersionRequest) Reset() { *m = GetFeastCoreVersionRequest{} } +func (m *GetFeastCoreVersionRequest) String() string { return proto.CompactTextString(m) } +func (*GetFeastCoreVersionRequest) ProtoMessage() {} +func (*GetFeastCoreVersionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{8} +} + +func (m *GetFeastCoreVersionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFeastCoreVersionRequest.Unmarshal(m, b) +} +func (m *GetFeastCoreVersionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFeastCoreVersionRequest.Marshal(b, m, deterministic) +} +func (m *GetFeastCoreVersionRequest) XXX_Merge(src proto.Message) { + 
xxx_messageInfo_GetFeastCoreVersionRequest.Merge(m, src) +} +func (m *GetFeastCoreVersionRequest) XXX_Size() int { + return xxx_messageInfo_GetFeastCoreVersionRequest.Size(m) +} +func (m *GetFeastCoreVersionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetFeastCoreVersionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFeastCoreVersionRequest proto.InternalMessageInfo + +type GetFeastCoreVersionResponse struct { + Version string `protobuf:"bytes,1,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFeastCoreVersionResponse) Reset() { *m = GetFeastCoreVersionResponse{} } +func (m *GetFeastCoreVersionResponse) String() string { return proto.CompactTextString(m) } +func (*GetFeastCoreVersionResponse) ProtoMessage() {} +func (*GetFeastCoreVersionResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{9} +} + +func (m *GetFeastCoreVersionResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFeastCoreVersionResponse.Unmarshal(m, b) +} +func (m *GetFeastCoreVersionResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFeastCoreVersionResponse.Marshal(b, m, deterministic) +} +func (m *GetFeastCoreVersionResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFeastCoreVersionResponse.Merge(m, src) +} +func (m *GetFeastCoreVersionResponse) XXX_Size() int { + return xxx_messageInfo_GetFeastCoreVersionResponse.Size(m) +} +func (m *GetFeastCoreVersionResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetFeastCoreVersionResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFeastCoreVersionResponse proto.InternalMessageInfo + +func (m *GetFeastCoreVersionResponse) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +type UpdateStoreRequest struct { + Store *Store `protobuf:"bytes,1,opt,name=store,proto3" json:"store,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateStoreRequest) Reset() { *m = UpdateStoreRequest{} } +func (m *UpdateStoreRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateStoreRequest) ProtoMessage() {} +func (*UpdateStoreRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{10} +} + +func (m *UpdateStoreRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateStoreRequest.Unmarshal(m, b) +} +func (m *UpdateStoreRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateStoreRequest.Marshal(b, m, deterministic) +} +func (m *UpdateStoreRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateStoreRequest.Merge(m, src) +} +func (m *UpdateStoreRequest) XXX_Size() int { + return xxx_messageInfo_UpdateStoreRequest.Size(m) +} +func (m *UpdateStoreRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateStoreRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateStoreRequest proto.InternalMessageInfo + +func (m *UpdateStoreRequest) GetStore() *Store { + if m != nil { + return m.Store + } + return nil +} + +type UpdateStoreResponse struct { + Store *Store `protobuf:"bytes,1,opt,name=store,proto3" json:"store,omitempty"` + Status UpdateStoreResponse_Status `protobuf:"varint,2,opt,name=status,proto3,enum=feast.core.UpdateStoreResponse_Status" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + 
XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateStoreResponse) Reset() { *m = UpdateStoreResponse{} } +func (m *UpdateStoreResponse) String() string { return proto.CompactTextString(m) } +func (*UpdateStoreResponse) ProtoMessage() {} +func (*UpdateStoreResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{11} +} + +func (m *UpdateStoreResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateStoreResponse.Unmarshal(m, b) +} +func (m *UpdateStoreResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateStoreResponse.Marshal(b, m, deterministic) +} +func (m *UpdateStoreResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateStoreResponse.Merge(m, src) +} +func (m *UpdateStoreResponse) XXX_Size() int { + return xxx_messageInfo_UpdateStoreResponse.Size(m) +} +func (m *UpdateStoreResponse) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateStoreResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateStoreResponse proto.InternalMessageInfo + +func (m *UpdateStoreResponse) GetStore() *Store { + if m != nil { + return m.Store + } + return nil +} + +func (m *UpdateStoreResponse) GetStatus() UpdateStoreResponse_Status { + if m != nil { + return m.Status + } + return UpdateStoreResponse_NO_CHANGE +} + +func init() { + proto.RegisterEnum("feast.core.ApplyFeatureSetResponse_Status", ApplyFeatureSetResponse_Status_name, ApplyFeatureSetResponse_Status_value) + proto.RegisterEnum("feast.core.UpdateStoreResponse_Status", UpdateStoreResponse_Status_name, UpdateStoreResponse_Status_value) + proto.RegisterType((*GetFeatureSetRequest)(nil), "feast.core.GetFeatureSetRequest") + proto.RegisterType((*GetFeatureSetResponse)(nil), "feast.core.GetFeatureSetResponse") + proto.RegisterType((*ListFeatureSetsRequest)(nil), "feast.core.ListFeatureSetsRequest") + proto.RegisterType((*ListFeatureSetsRequest_Filter)(nil), "feast.core.ListFeatureSetsRequest.Filter") + proto.RegisterType((*ListFeatureSetsResponse)(nil), "feast.core.ListFeatureSetsResponse") + proto.RegisterType((*ListStoresRequest)(nil), "feast.core.ListStoresRequest") + proto.RegisterType((*ListStoresRequest_Filter)(nil), "feast.core.ListStoresRequest.Filter") + proto.RegisterType((*ListStoresResponse)(nil), "feast.core.ListStoresResponse") + proto.RegisterType((*ApplyFeatureSetRequest)(nil), "feast.core.ApplyFeatureSetRequest") + proto.RegisterType((*ApplyFeatureSetResponse)(nil), "feast.core.ApplyFeatureSetResponse") + proto.RegisterType((*GetFeastCoreVersionRequest)(nil), "feast.core.GetFeastCoreVersionRequest") + proto.RegisterType((*GetFeastCoreVersionResponse)(nil), "feast.core.GetFeastCoreVersionResponse") + proto.RegisterType((*UpdateStoreRequest)(nil), "feast.core.UpdateStoreRequest") + proto.RegisterType((*UpdateStoreResponse)(nil), "feast.core.UpdateStoreResponse") +} + +func init() { proto.RegisterFile("feast/core/CoreService.proto", fileDescriptor_d9be266444105411) } + +var fileDescriptor_d9be266444105411 = []byte{ + // 636 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0xdd, 0x72, 0xd2, 0x40, + 0x14, 0x36, 0xb5, 0xa5, 0xc3, 0x89, 0xad, 0xb0, 0x28, 0x65, 0x52, 0xac, 0x18, 0x3b, 0x16, 0xbd, + 0x48, 0x66, 0xf0, 0xc2, 0x0b, 0xc5, 0x19, 0xfe, 0x5a, 0x67, 0x74, 0xa0, 0xb3, 0x80, 0xe3, 0xf4, + 0x86, 0x01, 0xba, 0x20, 0xb6, 0x65, 0x63, 0x76, 0xe9, 0x8c, 0x6f, 0xe3, 0x85, 0xef, 0xe1, 0x03, + 0xf8, 0x52, 0x4e, 0xb2, 0x5b, 0xb2, 0x09, 0x21, 0x5c, 
0xe8, 0x5d, 0xb2, 0xe7, 0x3b, 0xdf, 0x9e, + 0xfd, 0xce, 0x1f, 0x14, 0x27, 0x64, 0xc8, 0xb8, 0x3d, 0xa6, 0x2e, 0xb1, 0x1b, 0xd4, 0x25, 0x5d, + 0xe2, 0xde, 0xce, 0xc6, 0xc4, 0x72, 0x5c, 0xca, 0x29, 0x02, 0xdf, 0x6a, 0x79, 0x56, 0xe3, 0x50, + 0x41, 0x9e, 0x92, 0x21, 0x5f, 0x78, 0x60, 0x2e, 0x80, 0x46, 0x5e, 0x31, 0x76, 0x39, 0x75, 0x25, + 0x81, 0xd9, 0x84, 0x47, 0x67, 0x84, 0x07, 0x70, 0x4c, 0xbe, 0x2f, 0x08, 0xe3, 0x08, 0xc1, 0xf6, + 0x7c, 0x78, 0x43, 0x0a, 0x5a, 0x49, 0x2b, 0xa7, 0xb1, 0xff, 0x8d, 0x0a, 0xb0, 0x7b, 0x4b, 0x5c, + 0x36, 0xa3, 0xf3, 0xc2, 0x56, 0x49, 0x2b, 0xef, 0xe0, 0xbb, 0x5f, 0xb3, 0x07, 0x8f, 0x23, 0x2c, + 0xcc, 0xa1, 0x73, 0x46, 0xd0, 0x5b, 0xd0, 0x27, 0xe2, 0x74, 0xc0, 0x08, 0xf7, 0xd9, 0xf4, 0x8a, + 0x61, 0x05, 0x51, 0x5b, 0x81, 0x53, 0xd7, 0x21, 0x63, 0x0c, 0x93, 0xe5, 0xbf, 0xf9, 0x5b, 0x83, + 0xfc, 0xa7, 0x19, 0x53, 0x78, 0xd9, 0x5d, 0x78, 0x35, 0x48, 0x4d, 0x66, 0xd7, 0x9c, 0xb8, 0x92, + 0xf2, 0xa5, 0x4a, 0x19, 0xef, 0x63, 0x9d, 0xfa, 0x0e, 0x58, 0x3a, 0x1a, 0x23, 0x48, 0x89, 0x13, + 0x54, 0x86, 0x8c, 0x12, 0xe4, 0x40, 0x79, 0xf7, 0x7e, 0x10, 0x4d, 0xdb, 0x53, 0xc0, 0x82, 0x9c, + 0x8a, 0x54, 0xd5, 0x48, 0xe3, 0x6c, 0x00, 0xfe, 0x2c, 0x75, 0xf9, 0x02, 0x07, 0x2b, 0xc1, 0x48, + 0x65, 0xaa, 0xf0, 0x40, 0xa1, 0x62, 0x05, 0xad, 0x74, 0x7f, 0x83, 0x34, 0x7a, 0xc0, 0xcf, 0x4c, + 0x0a, 0x59, 0x8f, 0xd9, 0x4f, 0xe5, 0x52, 0x95, 0x77, 0x11, 0x55, 0x8e, 0xa3, 0xaa, 0x84, 0xe0, + 0x51, 0x41, 0x8a, 0x4b, 0x41, 0x62, 0x92, 0x6f, 0x56, 0x01, 0xa9, 0x0c, 0xf2, 0x15, 0x27, 0xb0, + 0xc3, 0xbc, 0x13, 0x19, 0x7e, 0x56, 0xbd, 0xd0, 0x87, 0x62, 0x61, 0x37, 0xfb, 0x90, 0xaf, 0x39, + 0xce, 0xf5, 0x8f, 0xd5, 0x4a, 0xfb, 0xa7, 0x12, 0xf9, 0xa3, 0xc1, 0xc1, 0x0a, 0xef, 0x7f, 0xa8, + 0x3d, 0x54, 0x87, 0x14, 0xe3, 0x43, 0xbe, 0x60, 0x7e, 0x72, 0xf7, 0x2b, 0xaf, 0x54, 0xbf, 0x35, + 0x37, 0x5a, 0x5d, 0xdf, 0x03, 0x4b, 0x4f, 0xd3, 0x86, 0x94, 0x38, 0x41, 0x7b, 0x90, 0x6e, 0x77, + 0x06, 0x8d, 0x0f, 0xb5, 0xf6, 0x59, 0x2b, 0x73, 0x0f, 0xe9, 0xb0, 0xdb, 0xc0, 0xad, 0x5a, 0xaf, + 0xd5, 0xcc, 0x68, 0x28, 0x0d, 0x3b, 0x2d, 0x8c, 0x3b, 0x38, 0xb3, 0x65, 0x16, 0xc1, 0x10, 0x6d, + 0xc4, 0xb8, 0xd7, 0xea, 0xb2, 0x8a, 0xa4, 0x50, 0xe6, 0x1b, 0x38, 0x8c, 0xb5, 0xca, 0xe7, 0x2a, + 0xdd, 0x29, 0xf2, 0xb6, 0xec, 0xce, 0x2a, 0xa0, 0xbe, 0x73, 0x39, 0xe4, 0x44, 0x64, 0x44, 0xea, + 0xae, 0xa4, 0x4e, 0x4b, 0x4c, 0xdd, 0x2f, 0x0d, 0x72, 0x21, 0xff, 0xd5, 0xdc, 0x27, 0x12, 0xa0, + 0xf7, 0x11, 0x2d, 0x5f, 0xa8, 0xc8, 0x18, 0xe6, 0xa8, 0x8e, 0xc7, 0x09, 0x3a, 0xf6, 0xcf, 0x9b, + 0x42, 0xc7, 0xca, 0xcf, 0x6d, 0xd0, 0x95, 0x01, 0x89, 0x26, 0x90, 0x8b, 0x91, 0x0b, 0x85, 0x2e, + 0x5f, 0xaf, 0xb6, 0x71, 0xb2, 0x11, 0x27, 0x65, 0xe8, 0xc1, 0x5e, 0x68, 0xf6, 0xa1, 0xd2, 0xaa, + 0x67, 0xb8, 0xe4, 0x8d, 0x67, 0x09, 0x08, 0xc9, 0x7a, 0x01, 0x0f, 0x23, 0x93, 0x03, 0x99, 0x9b, + 0x67, 0x9c, 0xf1, 0x3c, 0x11, 0x23, 0xb9, 0x3f, 0x02, 0x04, 0xad, 0x8c, 0x9e, 0x24, 0x0e, 0x09, + 0xe3, 0x68, 0x9d, 0x39, 0x08, 0x34, 0xd2, 0x0e, 0xe1, 0x40, 0xe3, 0xbb, 0x3e, 0x1c, 0xe8, 0xba, + 0x0e, 0x6e, 0x83, 0xae, 0x94, 0x07, 0x3a, 0x5a, 0x5b, 0x37, 0x82, 0xf3, 0xe9, 0x86, 0xba, 0xaa, + 0x77, 0x40, 0xd9, 0x97, 0xf5, 0x8c, 0x52, 0x2d, 0xe7, 0xde, 0x32, 0xbc, 0xb0, 0xa7, 0x33, 0xfe, + 0x75, 0x31, 0xb2, 0xc6, 0xf4, 0xc6, 0x9e, 0xd2, 0x6f, 0xe4, 0xca, 0x16, 0x5b, 0x93, 0x5d, 0x5e, + 0xd9, 0x53, 0x6a, 0xfb, 0x1b, 0x93, 0xd9, 0xc1, 0x26, 0x1d, 0xa5, 0xfc, 0xa3, 0xd7, 0x7f, 0x03, + 0x00, 0x00, 0xff, 0xff, 0xc7, 0xc1, 0x10, 0xf1, 0xa5, 0x07, 0x00, 0x00, +} + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CoreServiceClient is the client API for CoreService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CoreServiceClient interface { + // Retrieve version information about this Feast deployment + GetFeastCoreVersion(ctx context.Context, in *GetFeastCoreVersionRequest, opts ...grpc.CallOption) (*GetFeastCoreVersionResponse, error) + // Returns a specific feature set + GetFeatureSet(ctx context.Context, in *GetFeatureSetRequest, opts ...grpc.CallOption) (*GetFeatureSetResponse, error) + // Retrieve feature set details given a filter. + // + // Returns all feature sets matching that filter. If none are found, + // an empty list will be returned. + // If no filter is provided in the request, the response will contain all the feature + // sets currently stored in the registry. + ListFeatureSets(ctx context.Context, in *ListFeatureSetsRequest, opts ...grpc.CallOption) (*ListFeatureSetsResponse, error) + // Retrieve store details given a filter. + // + // Returns all stores matching that filter. If none are found, an empty list will be returned. + // If no filter is provided in the request, the response will contain all the stores currently + // stored in the registry. + ListStores(ctx context.Context, in *ListStoresRequest, opts ...grpc.CallOption) (*ListStoresResponse, error) + // Create or update and existing feature set. + // + // This function is idempotent - it will not create a new feature set if schema does not change. + // If an existing feature set is updated, core will advance the version number, which will be + // returned in response. + ApplyFeatureSet(ctx context.Context, in *ApplyFeatureSetRequest, opts ...grpc.CallOption) (*ApplyFeatureSetResponse, error) + // Updates core with the configuration of the store. + // + // If the changes are valid, core will return the given store configuration in response, and + // start or update the necessary feature population jobs for the updated store. + UpdateStore(ctx context.Context, in *UpdateStoreRequest, opts ...grpc.CallOption) (*UpdateStoreResponse, error) +} + +type coreServiceClient struct { + cc *grpc.ClientConn +} + +func NewCoreServiceClient(cc *grpc.ClientConn) CoreServiceClient { + return &coreServiceClient{cc} +} + +func (c *coreServiceClient) GetFeastCoreVersion(ctx context.Context, in *GetFeastCoreVersionRequest, opts ...grpc.CallOption) (*GetFeastCoreVersionResponse, error) { + out := new(GetFeastCoreVersionResponse) + err := c.cc.Invoke(ctx, "/feast.core.CoreService/GetFeastCoreVersion", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *coreServiceClient) GetFeatureSet(ctx context.Context, in *GetFeatureSetRequest, opts ...grpc.CallOption) (*GetFeatureSetResponse, error) { + out := new(GetFeatureSetResponse) + err := c.cc.Invoke(ctx, "/feast.core.CoreService/GetFeatureSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *coreServiceClient) ListFeatureSets(ctx context.Context, in *ListFeatureSetsRequest, opts ...grpc.CallOption) (*ListFeatureSetsResponse, error) { + out := new(ListFeatureSetsResponse) + err := c.cc.Invoke(ctx, "/feast.core.CoreService/ListFeatureSets", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *coreServiceClient) ListStores(ctx context.Context, in *ListStoresRequest, opts ...grpc.CallOption) (*ListStoresResponse, error) { + out := new(ListStoresResponse) + err := c.cc.Invoke(ctx, "/feast.core.CoreService/ListStores", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *coreServiceClient) ApplyFeatureSet(ctx context.Context, in *ApplyFeatureSetRequest, opts ...grpc.CallOption) (*ApplyFeatureSetResponse, error) { + out := new(ApplyFeatureSetResponse) + err := c.cc.Invoke(ctx, "/feast.core.CoreService/ApplyFeatureSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *coreServiceClient) UpdateStore(ctx context.Context, in *UpdateStoreRequest, opts ...grpc.CallOption) (*UpdateStoreResponse, error) { + out := new(UpdateStoreResponse) + err := c.cc.Invoke(ctx, "/feast.core.CoreService/UpdateStore", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CoreServiceServer is the server API for CoreService service. +type CoreServiceServer interface { + // Retrieve version information about this Feast deployment + GetFeastCoreVersion(context.Context, *GetFeastCoreVersionRequest) (*GetFeastCoreVersionResponse, error) + // Returns a specific feature set + GetFeatureSet(context.Context, *GetFeatureSetRequest) (*GetFeatureSetResponse, error) + // Retrieve feature set details given a filter. + // + // Returns all feature sets matching that filter. If none are found, + // an empty list will be returned. + // If no filter is provided in the request, the response will contain all the feature + // sets currently stored in the registry. + ListFeatureSets(context.Context, *ListFeatureSetsRequest) (*ListFeatureSetsResponse, error) + // Retrieve store details given a filter. + // + // Returns all stores matching that filter. If none are found, an empty list will be returned. + // If no filter is provided in the request, the response will contain all the stores currently + // stored in the registry. + ListStores(context.Context, *ListStoresRequest) (*ListStoresResponse, error) + // Create or update and existing feature set. + // + // This function is idempotent - it will not create a new feature set if schema does not change. + // If an existing feature set is updated, core will advance the version number, which will be + // returned in response. + ApplyFeatureSet(context.Context, *ApplyFeatureSetRequest) (*ApplyFeatureSetResponse, error) + // Updates core with the configuration of the store. + // + // If the changes are valid, core will return the given store configuration in response, and + // start or update the necessary feature population jobs for the updated store. + UpdateStore(context.Context, *UpdateStoreRequest) (*UpdateStoreResponse, error) +} + +// UnimplementedCoreServiceServer can be embedded to have forward compatible implementations. 
+type UnimplementedCoreServiceServer struct { +} + +func (*UnimplementedCoreServiceServer) GetFeastCoreVersion(ctx context.Context, req *GetFeastCoreVersionRequest) (*GetFeastCoreVersionResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetFeastCoreVersion not implemented") +} +func (*UnimplementedCoreServiceServer) GetFeatureSet(ctx context.Context, req *GetFeatureSetRequest) (*GetFeatureSetResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetFeatureSet not implemented") +} +func (*UnimplementedCoreServiceServer) ListFeatureSets(ctx context.Context, req *ListFeatureSetsRequest) (*ListFeatureSetsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListFeatureSets not implemented") +} +func (*UnimplementedCoreServiceServer) ListStores(ctx context.Context, req *ListStoresRequest) (*ListStoresResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListStores not implemented") +} +func (*UnimplementedCoreServiceServer) ApplyFeatureSet(ctx context.Context, req *ApplyFeatureSetRequest) (*ApplyFeatureSetResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ApplyFeatureSet not implemented") +} +func (*UnimplementedCoreServiceServer) UpdateStore(ctx context.Context, req *UpdateStoreRequest) (*UpdateStoreResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method UpdateStore not implemented") +} + +func RegisterCoreServiceServer(s *grpc.Server, srv CoreServiceServer) { + s.RegisterService(&_CoreService_serviceDesc, srv) +} + +func _CoreService_GetFeastCoreVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetFeastCoreVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CoreServiceServer).GetFeastCoreVersion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/feast.core.CoreService/GetFeastCoreVersion", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CoreServiceServer).GetFeastCoreVersion(ctx, req.(*GetFeastCoreVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CoreService_GetFeatureSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetFeatureSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CoreServiceServer).GetFeatureSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/feast.core.CoreService/GetFeatureSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CoreServiceServer).GetFeatureSet(ctx, req.(*GetFeatureSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CoreService_ListFeatureSets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListFeatureSetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CoreServiceServer).ListFeatureSets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/feast.core.CoreService/ListFeatureSets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CoreServiceServer).ListFeatureSets(ctx, 
req.(*ListFeatureSetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CoreService_ListStores_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListStoresRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CoreServiceServer).ListStores(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/feast.core.CoreService/ListStores", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CoreServiceServer).ListStores(ctx, req.(*ListStoresRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CoreService_ApplyFeatureSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ApplyFeatureSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CoreServiceServer).ApplyFeatureSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/feast.core.CoreService/ApplyFeatureSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CoreServiceServer).ApplyFeatureSet(ctx, req.(*ApplyFeatureSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CoreService_UpdateStore_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateStoreRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CoreServiceServer).UpdateStore(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/feast.core.CoreService/UpdateStore", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CoreServiceServer).UpdateStore(ctx, req.(*UpdateStoreRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CoreService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "feast.core.CoreService", + HandlerType: (*CoreServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetFeastCoreVersion", + Handler: _CoreService_GetFeastCoreVersion_Handler, + }, + { + MethodName: "GetFeatureSet", + Handler: _CoreService_GetFeatureSet_Handler, + }, + { + MethodName: "ListFeatureSets", + Handler: _CoreService_ListFeatureSets_Handler, + }, + { + MethodName: "ListStores", + Handler: _CoreService_ListStores_Handler, + }, + { + MethodName: "ApplyFeatureSet", + Handler: _CoreService_ApplyFeatureSet_Handler, + }, + { + MethodName: "UpdateStore", + Handler: _CoreService_UpdateStore_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "feast/core/CoreService.proto", +} diff --git a/sdk/go/protos/feast/core/FeatureSet.pb.go b/sdk/go/protos/feast/core/FeatureSet.pb.go new file mode 100644 index 00000000000..79cbcaa94b1 --- /dev/null +++ b/sdk/go/protos/feast/core/FeatureSet.pb.go @@ -0,0 +1,247 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: feast/core/FeatureSet.proto + +package core + +import ( + fmt "fmt" + types "github.com/gojek/feast/sdk/go/protos/feast/types" + proto "github.com/golang/protobuf/proto" + duration "github.com/golang/protobuf/ptypes/duration" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type FeatureSetSpec struct { + // Name of the featureSet. Must be unique. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // FeatureSet version. + Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + // List of entities contained within this featureSet. + // This allows the feature to be used during joins between feature sets. + // If the featureSet is ingested into a store that supports keys, this value + // will be made a key. + Entities []*EntitySpec `protobuf:"bytes,3,rep,name=entities,proto3" json:"entities,omitempty"` + // List of features contained within this featureSet. + Features []*FeatureSpec `protobuf:"bytes,4,rep,name=features,proto3" json:"features,omitempty"` + // Features in this feature set will only be retrieved if they are found + // after [time - max_age]. Missing or older feature values will be returned + // as nulls and indicated to end user + MaxAge *duration.Duration `protobuf:"bytes,5,opt,name=max_age,json=maxAge,proto3" json:"max_age,omitempty"` + // Optional. Source on which feature rows can be found. + // If not set, source will be set to the default value configured in Feast Core. + Source *Source `protobuf:"bytes,6,opt,name=source,proto3" json:"source,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeatureSetSpec) Reset() { *m = FeatureSetSpec{} } +func (m *FeatureSetSpec) String() string { return proto.CompactTextString(m) } +func (*FeatureSetSpec) ProtoMessage() {} +func (*FeatureSetSpec) Descriptor() ([]byte, []int) { + return fileDescriptor_972fbd278ac19c0c, []int{0} +} + +func (m *FeatureSetSpec) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeatureSetSpec.Unmarshal(m, b) +} +func (m *FeatureSetSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeatureSetSpec.Marshal(b, m, deterministic) +} +func (m *FeatureSetSpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeatureSetSpec.Merge(m, src) +} +func (m *FeatureSetSpec) XXX_Size() int { + return xxx_messageInfo_FeatureSetSpec.Size(m) +} +func (m *FeatureSetSpec) XXX_DiscardUnknown() { + xxx_messageInfo_FeatureSetSpec.DiscardUnknown(m) +} + +var xxx_messageInfo_FeatureSetSpec proto.InternalMessageInfo + +func (m *FeatureSetSpec) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *FeatureSetSpec) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *FeatureSetSpec) GetEntities() []*EntitySpec { + if m != nil { + return m.Entities + } + return nil +} + +func (m *FeatureSetSpec) GetFeatures() []*FeatureSpec { + if m != nil { + return m.Features + } + return nil +} + +func (m *FeatureSetSpec) GetMaxAge() *duration.Duration { + if m != nil { + return m.MaxAge + } + return nil +} + +func (m *FeatureSetSpec) GetSource() *Source { + if m != nil { + return m.Source + } + return nil +} + +type EntitySpec struct { + // Name of the entity. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Value type of the feature. 
+ ValueType types.ValueType_Enum `protobuf:"varint,2,opt,name=value_type,json=valueType,proto3,enum=feast.types.ValueType_Enum" json:"value_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntitySpec) Reset() { *m = EntitySpec{} } +func (m *EntitySpec) String() string { return proto.CompactTextString(m) } +func (*EntitySpec) ProtoMessage() {} +func (*EntitySpec) Descriptor() ([]byte, []int) { + return fileDescriptor_972fbd278ac19c0c, []int{1} +} + +func (m *EntitySpec) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntitySpec.Unmarshal(m, b) +} +func (m *EntitySpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntitySpec.Marshal(b, m, deterministic) +} +func (m *EntitySpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntitySpec.Merge(m, src) +} +func (m *EntitySpec) XXX_Size() int { + return xxx_messageInfo_EntitySpec.Size(m) +} +func (m *EntitySpec) XXX_DiscardUnknown() { + xxx_messageInfo_EntitySpec.DiscardUnknown(m) +} + +var xxx_messageInfo_EntitySpec proto.InternalMessageInfo + +func (m *EntitySpec) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *EntitySpec) GetValueType() types.ValueType_Enum { + if m != nil { + return m.ValueType + } + return types.ValueType_INVALID +} + +type FeatureSpec struct { + // Name of the feature. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Value type of the feature. + ValueType types.ValueType_Enum `protobuf:"varint,2,opt,name=value_type,json=valueType,proto3,enum=feast.types.ValueType_Enum" json:"value_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeatureSpec) Reset() { *m = FeatureSpec{} } +func (m *FeatureSpec) String() string { return proto.CompactTextString(m) } +func (*FeatureSpec) ProtoMessage() {} +func (*FeatureSpec) Descriptor() ([]byte, []int) { + return fileDescriptor_972fbd278ac19c0c, []int{2} +} + +func (m *FeatureSpec) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeatureSpec.Unmarshal(m, b) +} +func (m *FeatureSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeatureSpec.Marshal(b, m, deterministic) +} +func (m *FeatureSpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeatureSpec.Merge(m, src) +} +func (m *FeatureSpec) XXX_Size() int { + return xxx_messageInfo_FeatureSpec.Size(m) +} +func (m *FeatureSpec) XXX_DiscardUnknown() { + xxx_messageInfo_FeatureSpec.DiscardUnknown(m) +} + +var xxx_messageInfo_FeatureSpec proto.InternalMessageInfo + +func (m *FeatureSpec) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *FeatureSpec) GetValueType() types.ValueType_Enum { + if m != nil { + return m.ValueType + } + return types.ValueType_INVALID +} + +func init() { + proto.RegisterType((*FeatureSetSpec)(nil), "feast.core.FeatureSetSpec") + proto.RegisterType((*EntitySpec)(nil), "feast.core.EntitySpec") + proto.RegisterType((*FeatureSpec)(nil), "feast.core.FeatureSpec") +} + +func init() { proto.RegisterFile("feast/core/FeatureSet.proto", fileDescriptor_972fbd278ac19c0c) } + +var fileDescriptor_972fbd278ac19c0c = []byte{ + // 357 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x52, 0x4d, 0x6f, 0xe2, 0x30, + 0x10, 0x55, 0xf8, 0x08, 0x30, 0x48, 0xac, 0xe4, 0xc3, 0x92, 0x5d, 0xa4, 0x55, 0xc4, 
0x29, 0xda, + 0x83, 0x2d, 0x85, 0x5b, 0x6f, 0x45, 0x6d, 0x8f, 0x55, 0x15, 0xaa, 0x1e, 0xaa, 0x56, 0xc8, 0x84, + 0x21, 0x4d, 0x21, 0x71, 0x14, 0x3b, 0x08, 0x7e, 0x41, 0xff, 0x76, 0x15, 0x1b, 0x37, 0x39, 0xf4, + 0xd8, 0x9b, 0xc7, 0xef, 0xcd, 0xc7, 0x7b, 0x33, 0x30, 0xdb, 0x21, 0x97, 0x8a, 0xc5, 0xa2, 0x44, + 0x76, 0x87, 0x5c, 0x55, 0x25, 0xae, 0x50, 0xd1, 0xa2, 0x14, 0x4a, 0x10, 0xd0, 0x20, 0xad, 0xc1, + 0xbf, 0x53, 0x43, 0x54, 0xe7, 0x02, 0x25, 0x7b, 0xe2, 0x87, 0x0a, 0x0d, 0xc9, 0x02, 0xba, 0xc2, + 0x4a, 0x54, 0x65, 0x6c, 0x81, 0x7f, 0x89, 0x10, 0xc9, 0x01, 0x99, 0x8e, 0x36, 0xd5, 0x8e, 0x6d, + 0xab, 0x92, 0xab, 0x54, 0xe4, 0x06, 0x9f, 0x7f, 0x74, 0x60, 0xd2, 0xb4, 0x5c, 0x15, 0x18, 0x13, + 0x02, 0xbd, 0x9c, 0x67, 0xe8, 0x39, 0xbe, 0x13, 0x8c, 0x22, 0xfd, 0x26, 0x1e, 0x0c, 0x8e, 0x58, + 0xca, 0x54, 0xe4, 0x5e, 0xc7, 0x77, 0x82, 0x7e, 0x64, 0x43, 0x12, 0xc2, 0x10, 0x73, 0x95, 0xaa, + 0x14, 0xa5, 0xd7, 0xf5, 0xbb, 0xc1, 0x38, 0xfc, 0x4d, 0x9b, 0x89, 0xe9, 0x6d, 0x8d, 0x9d, 0xeb, + 0xba, 0xd1, 0x17, 0x8f, 0x2c, 0x60, 0xb8, 0x33, 0x3d, 0xa5, 0xd7, 0xd3, 0x39, 0xd3, 0x76, 0x8e, + 0x9d, 0x47, 0x27, 0x59, 0x22, 0x09, 0x61, 0x90, 0xf1, 0xd3, 0x9a, 0x27, 0xe8, 0xf5, 0x7d, 0x27, + 0x18, 0x87, 0x7f, 0xa8, 0xd1, 0x46, 0xad, 0x36, 0x7a, 0x73, 0xd1, 0x16, 0xb9, 0x19, 0x3f, 0x5d, + 0x27, 0x48, 0xfe, 0x83, 0x2b, 0xb5, 0x1b, 0x9e, 0xab, 0x53, 0x48, 0xbb, 0x8d, 0xf1, 0x29, 0xba, + 0x30, 0xe6, 0x2f, 0x00, 0xcd, 0xb0, 0xdf, 0x9a, 0x70, 0x05, 0x70, 0xac, 0x3d, 0x5f, 0xd7, 0xfe, + 0x6b, 0x1f, 0x26, 0xe1, 0xec, 0x52, 0x51, 0xaf, 0x84, 0xea, 0x95, 0x3c, 0x9e, 0x8b, 0x5a, 0x77, + 0x95, 0x45, 0xa3, 0xa3, 0x8d, 0xe7, 0xaf, 0x30, 0x6e, 0xc9, 0xfa, 0xe9, 0xf2, 0xcb, 0x7b, 0x68, + 0x9d, 0xc9, 0xf2, 0x57, 0xb3, 0xd1, 0x87, 0xda, 0x9b, 0x67, 0x96, 0xa4, 0xea, 0xad, 0xda, 0xd0, + 0x58, 0x64, 0x2c, 0x11, 0xef, 0xb8, 0x67, 0xe6, 0x5e, 0xe4, 0x76, 0xcf, 0x12, 0x61, 0x8e, 0x43, + 0xb2, 0xe6, 0x86, 0x36, 0xae, 0xfe, 0x5a, 0x7c, 0x06, 0x00, 0x00, 0xff, 0xff, 0xdb, 0x96, 0x15, + 0x04, 0x9a, 0x02, 0x00, 0x00, +} diff --git a/sdk/go/protos/feast/core/Source.pb.go b/sdk/go/protos/feast/core/Source.pb.go new file mode 100644 index 00000000000..090210a4961 --- /dev/null +++ b/sdk/go/protos/feast/core/Source.pb.go @@ -0,0 +1,201 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: feast/core/Source.proto + +package core + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type SourceType int32 + +const ( + SourceType_INVALID SourceType = 0 + SourceType_KAFKA SourceType = 1 +) + +var SourceType_name = map[int32]string{ + 0: "INVALID", + 1: "KAFKA", +} + +var SourceType_value = map[string]int32{ + "INVALID": 0, + "KAFKA": 1, +} + +func (x SourceType) String() string { + return proto.EnumName(SourceType_name, int32(x)) +} + +func (SourceType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_4d161c4e53091468, []int{0} +} + +type Source struct { + // The kind of data source Feast should connect to in order to retrieve FeatureRow value + Type SourceType `protobuf:"varint,1,opt,name=type,proto3,enum=feast.core.SourceType" json:"type,omitempty"` + // Source specific configuration + // + // Types that are valid to be assigned to SourceConfig: + // *Source_KafkaSourceConfig + SourceConfig isSource_SourceConfig `protobuf_oneof:"source_config"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Source) Reset() { *m = Source{} } +func (m *Source) String() string { return proto.CompactTextString(m) } +func (*Source) ProtoMessage() {} +func (*Source) Descriptor() ([]byte, []int) { + return fileDescriptor_4d161c4e53091468, []int{0} +} + +func (m *Source) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Source.Unmarshal(m, b) +} +func (m *Source) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Source.Marshal(b, m, deterministic) +} +func (m *Source) XXX_Merge(src proto.Message) { + xxx_messageInfo_Source.Merge(m, src) +} +func (m *Source) XXX_Size() int { + return xxx_messageInfo_Source.Size(m) +} +func (m *Source) XXX_DiscardUnknown() { + xxx_messageInfo_Source.DiscardUnknown(m) +} + +var xxx_messageInfo_Source proto.InternalMessageInfo + +func (m *Source) GetType() SourceType { + if m != nil { + return m.Type + } + return SourceType_INVALID +} + +type isSource_SourceConfig interface { + isSource_SourceConfig() +} + +type Source_KafkaSourceConfig struct { + KafkaSourceConfig *KafkaSourceConfig `protobuf:"bytes,2,opt,name=kafka_source_config,json=kafkaSourceConfig,proto3,oneof"` +} + +func (*Source_KafkaSourceConfig) isSource_SourceConfig() {} + +func (m *Source) GetSourceConfig() isSource_SourceConfig { + if m != nil { + return m.SourceConfig + } + return nil +} + +func (m *Source) GetKafkaSourceConfig() *KafkaSourceConfig { + if x, ok := m.GetSourceConfig().(*Source_KafkaSourceConfig); ok { + return x.KafkaSourceConfig + } + return nil +} + +// XXX_OneofWrappers is for the internal use of the proto package. +func (*Source) XXX_OneofWrappers() []interface{} { + return []interface{}{ + (*Source_KafkaSourceConfig)(nil), + } +} + +type KafkaSourceConfig struct { + // - bootstrapServers: [comma delimited value of host[:port]] + BootstrapServers string `protobuf:"bytes,1,opt,name=bootstrap_servers,json=bootstrapServers,proto3" json:"bootstrap_servers,omitempty"` + // - topics: [Kafka topic name. This value is provisioned by core and should not be set by the user.] 
+ Topic string `protobuf:"bytes,2,opt,name=topic,proto3" json:"topic,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KafkaSourceConfig) Reset() { *m = KafkaSourceConfig{} } +func (m *KafkaSourceConfig) String() string { return proto.CompactTextString(m) } +func (*KafkaSourceConfig) ProtoMessage() {} +func (*KafkaSourceConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_4d161c4e53091468, []int{1} +} + +func (m *KafkaSourceConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KafkaSourceConfig.Unmarshal(m, b) +} +func (m *KafkaSourceConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KafkaSourceConfig.Marshal(b, m, deterministic) +} +func (m *KafkaSourceConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_KafkaSourceConfig.Merge(m, src) +} +func (m *KafkaSourceConfig) XXX_Size() int { + return xxx_messageInfo_KafkaSourceConfig.Size(m) +} +func (m *KafkaSourceConfig) XXX_DiscardUnknown() { + xxx_messageInfo_KafkaSourceConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_KafkaSourceConfig proto.InternalMessageInfo + +func (m *KafkaSourceConfig) GetBootstrapServers() string { + if m != nil { + return m.BootstrapServers + } + return "" +} + +func (m *KafkaSourceConfig) GetTopic() string { + if m != nil { + return m.Topic + } + return "" +} + +func init() { + proto.RegisterEnum("feast.core.SourceType", SourceType_name, SourceType_value) + proto.RegisterType((*Source)(nil), "feast.core.Source") + proto.RegisterType((*KafkaSourceConfig)(nil), "feast.core.KafkaSourceConfig") +} + +func init() { proto.RegisterFile("feast/core/Source.proto", fileDescriptor_4d161c4e53091468) } + +var fileDescriptor_4d161c4e53091468 = []byte{ + // 273 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0x90, 0xd1, 0x4a, 0xc3, 0x30, + 0x14, 0x86, 0x57, 0x71, 0x93, 0x9e, 0xa2, 0xb6, 0x51, 0x74, 0x37, 0xc2, 0x18, 0x5e, 0x8c, 0x09, + 0x0d, 0xcc, 0x27, 0x68, 0x15, 0x71, 0x56, 0x54, 0x3a, 0xd9, 0x85, 0x37, 0xa5, 0xad, 0x69, 0xad, + 0x41, 0x4f, 0x48, 0x32, 0x61, 0x2f, 0xe2, 0xf3, 0x4a, 0x13, 0xb0, 0xd3, 0x5d, 0xf6, 0xff, 0xbf, + 0xbf, 0x9c, 0x7c, 0x70, 0x5a, 0xb1, 0x5c, 0x69, 0x5a, 0xa2, 0x64, 0x74, 0x81, 0x2b, 0x59, 0xb2, + 0x50, 0x48, 0xd4, 0x48, 0xc0, 0x14, 0x61, 0x5b, 0x8c, 0xbf, 0x1d, 0x18, 0xd8, 0x92, 0x4c, 0x61, + 0x57, 0xaf, 0x05, 0x1b, 0x3a, 0x23, 0x67, 0x72, 0x30, 0x3b, 0x09, 0x3b, 0x2a, 0xb4, 0xc4, 0xf3, + 0x5a, 0xb0, 0xd4, 0x30, 0xe4, 0x11, 0x8e, 0x78, 0x5e, 0xf1, 0x3c, 0x53, 0xa6, 0xc9, 0x4a, 0xfc, + 0xac, 0x9a, 0x7a, 0xb8, 0x33, 0x72, 0x26, 0xde, 0xec, 0x6c, 0x73, 0x9a, 0xb4, 0x98, 0xdd, 0x5f, + 0x19, 0xe8, 0xb6, 0x97, 0x06, 0xfc, 0x7f, 0x18, 0x1f, 0xc2, 0xfe, 0x9f, 0x5f, 0x8d, 0x97, 0x10, + 0x6c, 0x4d, 0xc9, 0x05, 0x04, 0x05, 0xa2, 0x56, 0x5a, 0xe6, 0x22, 0x53, 0x4c, 0x7e, 0x31, 0xa9, + 0xcc, 0xbd, 0x6e, 0xea, 0xff, 0x16, 0x0b, 0x9b, 0x93, 0x63, 0xe8, 0x6b, 0x14, 0x4d, 0x69, 0xae, + 0x72, 0x53, 0xfb, 0x31, 0x3d, 0x07, 0xe8, 0x5e, 0x43, 0x3c, 0xd8, 0x9b, 0x3f, 0x2c, 0xa3, 0xfb, + 0xf9, 0xb5, 0xdf, 0x23, 0x2e, 0xf4, 0x93, 0xe8, 0x26, 0x89, 0x7c, 0x27, 0xbe, 0x83, 0x0d, 0x49, + 0xb1, 0x67, 0x17, 0x4f, 0xad, 0xbd, 0x17, 0x5a, 0x37, 0xfa, 0x6d, 0x55, 0x84, 0x25, 0x7e, 0xd0, + 0x1a, 0xdf, 0x19, 0xa7, 0xd6, 0xb3, 0x7a, 0xe5, 0xb4, 0x46, 0x6a, 0x14, 0x2b, 0xda, 0xb9, 0x2f, + 0x06, 0x26, 0xba, 0xfc, 0x09, 0x00, 0x00, 0xff, 0xff, 0xc3, 0xc9, 0xf2, 0x62, 0x90, 0x01, 0x00, + 0x00, +} diff --git 
a/sdk/go/protos/feast/core/Store.pb.go b/sdk/go/protos/feast/core/Store.pb.go new file mode 100644 index 00000000000..5dafb63d942 --- /dev/null +++ b/sdk/go/protos/feast/core/Store.pb.go @@ -0,0 +1,489 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: feast/core/Store.proto + +package core + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type Store_StoreType int32 + +const ( + Store_INVALID Store_StoreType = 0 + // Redis stores a FeatureRow element as a key, value pair. + // + // The Redis data types used (https://redis.io/topics/data-types): + // - key: STRING + // - value: STRING + // + // Encodings: + // - key: byte array of RedisKey (refer to feast.storage.RedisKey) + // - value: byte array of FeatureRow (refer to feast.types.FeatureRow) + // + Store_REDIS Store_StoreType = 1 + // BigQuery stores a FeatureRow element as a row in a BigQuery table. + // + // Table name is derived from the feature set name and version as: + // [feature_set_name]_v[feature_set_version] + // + // For example: + // A feature row for feature set "driver" and version "1" will be written + // to table "driver_v1". + // + // The entities and features in a FeatureSetSpec corresponds to the + // fields in the BigQuery table (these make up the BigQuery schema). + // The name of the entity spec and feature spec corresponds to the column + // names, and the value_type of entity spec and feature spec corresponds + // to BigQuery standard SQL data type of the column. + // + // The following BigQuery fields are reserved for Feast internal use. + // Ingestion of entity or feature spec with names identical + // to the following field names will raise an exception during ingestion. + // + // column_name | column_data_type | description + // ====================|==================|================================ + // - event_timestamp | TIMESTAMP | event time of the FeatureRow + // - created_timestamp | TIMESTAMP | processing time of the ingestion of the FeatureRow + // - job_id | STRING | identifier for the job that writes the FeatureRow to the corresponding BigQuery table + // + // BigQuery table created will be partitioned by the field "event_timestamp" + // of the FeatureRow (https://cloud.google.com/bigquery/docs/partitioned-tables). + // + // Since newer version of feature set can introduce breaking, non backward- + // compatible BigQuery schema updates, incrementing the version of a + // feature set will result in the creation of a new empty BigQuery table + // with the new schema. 
+ // + // The following table shows how ValueType in Feast is mapped to + // BigQuery Standard SQL data types + // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types): + // + // BYTES : BYTES + // STRING : STRING + // INT32 : INT64 + // INT64 : IN64 + // DOUBLE : FLOAT64 + // FLOAT : FLOAT64 + // BOOL : BOOL + // BYTES_LIST : ARRAY + // STRING_LIST : ARRAY + // INT32_LIST : ARRAY + // INT64_LIST : ARRAY + // DOUBLE_LIST : ARRAY + // FLOAT_LIST : ARRAY + // BOOL_LIST : ARRAY + // + // The column mode in BigQuery is set to "Nullable" such that unset Value + // in a FeatureRow corresponds to NULL value in BigQuery. + // + Store_BIGQUERY Store_StoreType = 2 + // Unsupported in Feast 0.3 + Store_CASSANDRA Store_StoreType = 3 +) + +var Store_StoreType_name = map[int32]string{ + 0: "INVALID", + 1: "REDIS", + 2: "BIGQUERY", + 3: "CASSANDRA", +} + +var Store_StoreType_value = map[string]int32{ + "INVALID": 0, + "REDIS": 1, + "BIGQUERY": 2, + "CASSANDRA": 3, +} + +func (x Store_StoreType) String() string { + return proto.EnumName(Store_StoreType_name, int32(x)) +} + +func (Store_StoreType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_4b177bc9ccf64875, []int{0, 0} +} + +// Store provides a location where Feast reads and writes feature values. +// Feature values will be written to the Store in the form of FeatureRow elements. +// The way FeatureRow is encoded and decoded when it is written to and read from +// the Store depends on the type of the Store. +// +// For example, a FeatureRow will materialize as a row in a table in +// BigQuery but it will materialize as a key, value pair element in Redis. +// +type Store struct { + // Name of the store. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Type of store. + Type Store_StoreType `protobuf:"varint,2,opt,name=type,proto3,enum=feast.core.Store_StoreType" json:"type,omitempty"` + // Feature sets to subscribe to. + Subscriptions []*Store_Subscription `protobuf:"bytes,4,rep,name=subscriptions,proto3" json:"subscriptions,omitempty"` + // Configuration to connect to the store. Required. 
+ // + // Types that are valid to be assigned to Config: + // *Store_RedisConfig_ + // *Store_BigqueryConfig + // *Store_CassandraConfig_ + Config isStore_Config `protobuf_oneof:"config"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Store) Reset() { *m = Store{} } +func (m *Store) String() string { return proto.CompactTextString(m) } +func (*Store) ProtoMessage() {} +func (*Store) Descriptor() ([]byte, []int) { + return fileDescriptor_4b177bc9ccf64875, []int{0} +} + +func (m *Store) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Store.Unmarshal(m, b) +} +func (m *Store) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Store.Marshal(b, m, deterministic) +} +func (m *Store) XXX_Merge(src proto.Message) { + xxx_messageInfo_Store.Merge(m, src) +} +func (m *Store) XXX_Size() int { + return xxx_messageInfo_Store.Size(m) +} +func (m *Store) XXX_DiscardUnknown() { + xxx_messageInfo_Store.DiscardUnknown(m) +} + +var xxx_messageInfo_Store proto.InternalMessageInfo + +func (m *Store) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Store) GetType() Store_StoreType { + if m != nil { + return m.Type + } + return Store_INVALID +} + +func (m *Store) GetSubscriptions() []*Store_Subscription { + if m != nil { + return m.Subscriptions + } + return nil +} + +type isStore_Config interface { + isStore_Config() +} + +type Store_RedisConfig_ struct { + RedisConfig *Store_RedisConfig `protobuf:"bytes,11,opt,name=redis_config,json=redisConfig,proto3,oneof"` +} + +type Store_BigqueryConfig struct { + BigqueryConfig *Store_BigQueryConfig `protobuf:"bytes,12,opt,name=bigquery_config,json=bigqueryConfig,proto3,oneof"` +} + +type Store_CassandraConfig_ struct { + CassandraConfig *Store_CassandraConfig `protobuf:"bytes,13,opt,name=cassandra_config,json=cassandraConfig,proto3,oneof"` +} + +func (*Store_RedisConfig_) isStore_Config() {} + +func (*Store_BigqueryConfig) isStore_Config() {} + +func (*Store_CassandraConfig_) isStore_Config() {} + +func (m *Store) GetConfig() isStore_Config { + if m != nil { + return m.Config + } + return nil +} + +func (m *Store) GetRedisConfig() *Store_RedisConfig { + if x, ok := m.GetConfig().(*Store_RedisConfig_); ok { + return x.RedisConfig + } + return nil +} + +func (m *Store) GetBigqueryConfig() *Store_BigQueryConfig { + if x, ok := m.GetConfig().(*Store_BigqueryConfig); ok { + return x.BigqueryConfig + } + return nil +} + +func (m *Store) GetCassandraConfig() *Store_CassandraConfig { + if x, ok := m.GetConfig().(*Store_CassandraConfig_); ok { + return x.CassandraConfig + } + return nil +} + +// XXX_OneofWrappers is for the internal use of the proto package. 
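+
+// NOTE: illustrative sketch, not generated code. Populating the Config oneof
+// from an importing package requires wrapping the concrete config in its
+// generated wrapper type (Store_RedisConfig_ here); the store name, host,
+// port and subscription values below are assumed examples only.
+//
+//	store := &core.Store{
+//		Name: "online-store",
+//		Type: core.Store_REDIS,
+//		Subscriptions: []*core.Store_Subscription{
+//			// Subscribe to the latest version of every feature set matching the regex.
+//			{Name: "customer_.*", Version: "latest"},
+//		},
+//		Config: &core.Store_RedisConfig_{
+//			RedisConfig: &core.Store_RedisConfig{Host: "localhost", Port: 6379},
+//		},
+//	}
+//	// A Store built this way would typically be sent to Feast Core via the
+//	// CoreService UpdateStore RPC to register or update the store.
+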
+func (*Store) XXX_OneofWrappers() []interface{} { + return []interface{}{ + (*Store_RedisConfig_)(nil), + (*Store_BigqueryConfig)(nil), + (*Store_CassandraConfig_)(nil), + } +} + +type Store_RedisConfig struct { + Host string `protobuf:"bytes,1,opt,name=host,proto3" json:"host,omitempty"` + Port int32 `protobuf:"varint,2,opt,name=port,proto3" json:"port,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Store_RedisConfig) Reset() { *m = Store_RedisConfig{} } +func (m *Store_RedisConfig) String() string { return proto.CompactTextString(m) } +func (*Store_RedisConfig) ProtoMessage() {} +func (*Store_RedisConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_4b177bc9ccf64875, []int{0, 0} +} + +func (m *Store_RedisConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Store_RedisConfig.Unmarshal(m, b) +} +func (m *Store_RedisConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Store_RedisConfig.Marshal(b, m, deterministic) +} +func (m *Store_RedisConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_Store_RedisConfig.Merge(m, src) +} +func (m *Store_RedisConfig) XXX_Size() int { + return xxx_messageInfo_Store_RedisConfig.Size(m) +} +func (m *Store_RedisConfig) XXX_DiscardUnknown() { + xxx_messageInfo_Store_RedisConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_Store_RedisConfig proto.InternalMessageInfo + +func (m *Store_RedisConfig) GetHost() string { + if m != nil { + return m.Host + } + return "" +} + +func (m *Store_RedisConfig) GetPort() int32 { + if m != nil { + return m.Port + } + return 0 +} + +type Store_BigQueryConfig struct { + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Store_BigQueryConfig) Reset() { *m = Store_BigQueryConfig{} } +func (m *Store_BigQueryConfig) String() string { return proto.CompactTextString(m) } +func (*Store_BigQueryConfig) ProtoMessage() {} +func (*Store_BigQueryConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_4b177bc9ccf64875, []int{0, 1} +} + +func (m *Store_BigQueryConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Store_BigQueryConfig.Unmarshal(m, b) +} +func (m *Store_BigQueryConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Store_BigQueryConfig.Marshal(b, m, deterministic) +} +func (m *Store_BigQueryConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_Store_BigQueryConfig.Merge(m, src) +} +func (m *Store_BigQueryConfig) XXX_Size() int { + return xxx_messageInfo_Store_BigQueryConfig.Size(m) +} +func (m *Store_BigQueryConfig) XXX_DiscardUnknown() { + xxx_messageInfo_Store_BigQueryConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_Store_BigQueryConfig proto.InternalMessageInfo + +func (m *Store_BigQueryConfig) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *Store_BigQueryConfig) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +type Store_CassandraConfig struct { + Host string `protobuf:"bytes,1,opt,name=host,proto3" json:"host,omitempty"` + Port int32 `protobuf:"varint,2,opt,name=port,proto3" json:"port,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + 
XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Store_CassandraConfig) Reset() { *m = Store_CassandraConfig{} } +func (m *Store_CassandraConfig) String() string { return proto.CompactTextString(m) } +func (*Store_CassandraConfig) ProtoMessage() {} +func (*Store_CassandraConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_4b177bc9ccf64875, []int{0, 2} +} + +func (m *Store_CassandraConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Store_CassandraConfig.Unmarshal(m, b) +} +func (m *Store_CassandraConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Store_CassandraConfig.Marshal(b, m, deterministic) +} +func (m *Store_CassandraConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_Store_CassandraConfig.Merge(m, src) +} +func (m *Store_CassandraConfig) XXX_Size() int { + return xxx_messageInfo_Store_CassandraConfig.Size(m) +} +func (m *Store_CassandraConfig) XXX_DiscardUnknown() { + xxx_messageInfo_Store_CassandraConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_Store_CassandraConfig proto.InternalMessageInfo + +func (m *Store_CassandraConfig) GetHost() string { + if m != nil { + return m.Host + } + return "" +} + +func (m *Store_CassandraConfig) GetPort() int32 { + if m != nil { + return m.Port + } + return 0 +} + +type Store_Subscription struct { + // Name of featureSet to subscribe to. This field supports any valid basic POSIX regex, + // e.g. customer_.* or .* + // https://www.regular-expressions.info/posix.html + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Versions of the given featureSet that will be ingested into this store. + // Valid options for version: + // latest: only subscribe to latest version of feature set + // [version number]: pin to a specific version + // >[version number]: subscribe to all versions larger than or equal to [version number] + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Store_Subscription) Reset() { *m = Store_Subscription{} } +func (m *Store_Subscription) String() string { return proto.CompactTextString(m) } +func (*Store_Subscription) ProtoMessage() {} +func (*Store_Subscription) Descriptor() ([]byte, []int) { + return fileDescriptor_4b177bc9ccf64875, []int{0, 3} +} + +func (m *Store_Subscription) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Store_Subscription.Unmarshal(m, b) +} +func (m *Store_Subscription) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Store_Subscription.Marshal(b, m, deterministic) +} +func (m *Store_Subscription) XXX_Merge(src proto.Message) { + xxx_messageInfo_Store_Subscription.Merge(m, src) +} +func (m *Store_Subscription) XXX_Size() int { + return xxx_messageInfo_Store_Subscription.Size(m) +} +func (m *Store_Subscription) XXX_DiscardUnknown() { + xxx_messageInfo_Store_Subscription.DiscardUnknown(m) +} + +var xxx_messageInfo_Store_Subscription proto.InternalMessageInfo + +func (m *Store_Subscription) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Store_Subscription) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func init() { + proto.RegisterEnum("feast.core.Store_StoreType", Store_StoreType_name, Store_StoreType_value) + proto.RegisterType((*Store)(nil), "feast.core.Store") + 
proto.RegisterType((*Store_RedisConfig)(nil), "feast.core.Store.RedisConfig") + proto.RegisterType((*Store_BigQueryConfig)(nil), "feast.core.Store.BigQueryConfig") + proto.RegisterType((*Store_CassandraConfig)(nil), "feast.core.Store.CassandraConfig") + proto.RegisterType((*Store_Subscription)(nil), "feast.core.Store.Subscription") +} + +func init() { proto.RegisterFile("feast/core/Store.proto", fileDescriptor_4b177bc9ccf64875) } + +var fileDescriptor_4b177bc9ccf64875 = []byte{ + // 442 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0xcf, 0x6f, 0xd3, 0x30, + 0x14, 0xc7, 0x97, 0xae, 0xdd, 0x96, 0x97, 0xfe, 0x88, 0x7c, 0x40, 0x51, 0xd1, 0x50, 0xd8, 0xa9, + 0xa7, 0x58, 0x2a, 0xe2, 0x80, 0xc4, 0x81, 0xa6, 0x9d, 0x20, 0x02, 0x55, 0xcc, 0x05, 0x24, 0xb8, + 0x4c, 0xf9, 0xe1, 0x65, 0xde, 0xb4, 0x38, 0xd8, 0x2e, 0x52, 0xff, 0x3a, 0xfe, 0x35, 0x64, 0x27, + 0x69, 0x53, 0xda, 0xc3, 0x2e, 0x91, 0xfd, 0x7d, 0xdf, 0xf7, 0xc9, 0xd3, 0xf3, 0x7b, 0xf0, 0xe2, + 0x8e, 0xc6, 0x52, 0xe1, 0x94, 0x0b, 0x8a, 0x57, 0x8a, 0x0b, 0x1a, 0x94, 0x82, 0x2b, 0x8e, 0xc0, + 0xe8, 0x81, 0xd6, 0xaf, 0xfe, 0xf6, 0xa0, 0x67, 0x62, 0x08, 0x41, 0xb7, 0x88, 0x9f, 0xa8, 0x67, + 0xf9, 0xd6, 0xc4, 0x26, 0xe6, 0x8c, 0x30, 0x74, 0xd5, 0xa6, 0xa4, 0x5e, 0xc7, 0xb7, 0x26, 0xc3, + 0xe9, 0xcb, 0x60, 0x97, 0x18, 0x54, 0x40, 0xf3, 0xfd, 0xb6, 0x29, 0x29, 0x31, 0x46, 0xb4, 0x80, + 0x81, 0x5c, 0x27, 0x32, 0x15, 0xac, 0x54, 0x8c, 0x17, 0xd2, 0xeb, 0xfa, 0xa7, 0x13, 0x67, 0xfa, + 0xea, 0x48, 0x66, 0xcb, 0x46, 0xf6, 0x93, 0x50, 0x08, 0x7d, 0x41, 0x33, 0x26, 0x6f, 0x53, 0x5e, + 0xdc, 0xb1, 0xdc, 0x73, 0x7c, 0x6b, 0xe2, 0x4c, 0x2f, 0x0f, 0x21, 0x44, 0xbb, 0xe6, 0xc6, 0xf4, + 0xe9, 0x84, 0x38, 0x62, 0x77, 0x45, 0x9f, 0x61, 0x94, 0xb0, 0xfc, 0xf7, 0x9a, 0x8a, 0x4d, 0x83, + 0xe9, 0x1b, 0x8c, 0x7f, 0x88, 0x09, 0x59, 0x7e, 0xa3, 0x8d, 0x5b, 0xd2, 0xb0, 0x49, 0xad, 0x61, + 0x4b, 0x70, 0xd3, 0x58, 0xca, 0xb8, 0xc8, 0x44, 0xdc, 0xd0, 0x06, 0x86, 0xf6, 0xfa, 0x90, 0x36, + 0x6f, 0x9c, 0x5b, 0xdc, 0x28, 0xdd, 0x97, 0xc6, 0x6f, 0xc1, 0x69, 0x95, 0xae, 0x5b, 0x7f, 0xcf, + 0xa5, 0x6a, 0x5a, 0xaf, 0xcf, 0x5a, 0x2b, 0xb9, 0x50, 0xa6, 0xf5, 0x3d, 0x62, 0xce, 0xe3, 0x25, + 0x0c, 0xf7, 0x4b, 0x45, 0x97, 0x00, 0xa5, 0xe0, 0x0f, 0x34, 0x55, 0xb7, 0x2c, 0xab, 0xf3, 0xed, + 0x5a, 0x89, 0x32, 0x1d, 0xce, 0x62, 0x15, 0x4b, 0x6a, 0xc2, 0x9d, 0x2a, 0x5c, 0x2b, 0x51, 0x36, + 0x7e, 0x07, 0xa3, 0xff, 0x8a, 0x7d, 0x76, 0x29, 0xef, 0xa1, 0xdf, 0x7e, 0xc1, 0xa3, 0xd3, 0xe3, + 0xc1, 0xf9, 0x1f, 0x2a, 0x24, 0xe3, 0x45, 0xfd, 0xeb, 0xe6, 0x7a, 0xf5, 0x01, 0xec, 0xed, 0xe4, + 0x20, 0x07, 0xce, 0xa3, 0xe5, 0x8f, 0xd9, 0x97, 0x68, 0xe1, 0x9e, 0x20, 0x1b, 0x7a, 0xe4, 0x7a, + 0x11, 0xad, 0x5c, 0x0b, 0xf5, 0xe1, 0x22, 0x8c, 0x3e, 0xde, 0x7c, 0xbf, 0x26, 0x3f, 0xdd, 0x0e, + 0x1a, 0x80, 0x3d, 0x9f, 0xad, 0x56, 0xb3, 0xe5, 0x82, 0xcc, 0xdc, 0xd3, 0xf0, 0x02, 0xce, 0xaa, + 0x77, 0x08, 0x23, 0x68, 0xcd, 0x73, 0x08, 0x86, 0xfb, 0x55, 0xcf, 0xf9, 0x2f, 0x9c, 0x33, 0x75, + 0xbf, 0x4e, 0x82, 0x94, 0x3f, 0xe1, 0x9c, 0x3f, 0xd0, 0x47, 0x5c, 0x2d, 0x84, 0xcc, 0x1e, 0x71, + 0xce, 0xb1, 0x59, 0x06, 0x89, 0x77, 0x4b, 0x92, 0x9c, 0x19, 0xe9, 0xcd, 0xbf, 0x00, 0x00, 0x00, + 0xff, 0xff, 0xdc, 0xaf, 0xad, 0x8c, 0x39, 0x03, 0x00, 0x00, +} diff --git a/sdk/go/protos/feast/serving/ServingService.pb.go b/sdk/go/protos/feast/serving/ServingService.pb.go new file mode 100644 index 00000000000..49c730ad3ca --- /dev/null +++ b/sdk/go/protos/feast/serving/ServingService.pb.go @@ -0,0 +1,1173 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: feast/serving/ServingService.proto + +package serving + +import ( + context "context" + fmt "fmt" + types "github.com/gojek/feast/sdk/go/protos/feast/types" + proto "github.com/golang/protobuf/proto" + duration "github.com/golang/protobuf/ptypes/duration" + timestamp "github.com/golang/protobuf/ptypes/timestamp" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type FeastServingType int32 + +const ( + FeastServingType_FEAST_SERVING_TYPE_INVALID FeastServingType = 0 + // Online serving receives entity data directly and synchronously and will + // respond immediately. + FeastServingType_FEAST_SERVING_TYPE_ONLINE FeastServingType = 1 + // Batch serving receives entity data asynchronously and orchestrates the + // retrieval through a staging location. + FeastServingType_FEAST_SERVING_TYPE_BATCH FeastServingType = 2 +) + +var FeastServingType_name = map[int32]string{ + 0: "FEAST_SERVING_TYPE_INVALID", + 1: "FEAST_SERVING_TYPE_ONLINE", + 2: "FEAST_SERVING_TYPE_BATCH", +} + +var FeastServingType_value = map[string]int32{ + "FEAST_SERVING_TYPE_INVALID": 0, + "FEAST_SERVING_TYPE_ONLINE": 1, + "FEAST_SERVING_TYPE_BATCH": 2, +} + +func (x FeastServingType) String() string { + return proto.EnumName(FeastServingType_name, int32(x)) +} + +func (FeastServingType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_0c1ba93cf29a8d9d, []int{0} +} + +type JobType int32 + +const ( + JobType_JOB_TYPE_INVALID JobType = 0 + JobType_JOB_TYPE_DOWNLOAD JobType = 1 +) + +var JobType_name = map[int32]string{ + 0: "JOB_TYPE_INVALID", + 1: "JOB_TYPE_DOWNLOAD", +} + +var JobType_value = map[string]int32{ + "JOB_TYPE_INVALID": 0, + "JOB_TYPE_DOWNLOAD": 1, +} + +func (x JobType) String() string { + return proto.EnumName(JobType_name, int32(x)) +} + +func (JobType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_0c1ba93cf29a8d9d, []int{1} +} + +type JobStatus int32 + +const ( + JobStatus_JOB_STATUS_INVALID JobStatus = 0 + JobStatus_JOB_STATUS_PENDING JobStatus = 1 + JobStatus_JOB_STATUS_RUNNING JobStatus = 2 + JobStatus_JOB_STATUS_DONE JobStatus = 3 +) + +var JobStatus_name = map[int32]string{ + 0: "JOB_STATUS_INVALID", + 1: "JOB_STATUS_PENDING", + 2: "JOB_STATUS_RUNNING", + 3: "JOB_STATUS_DONE", +} + +var JobStatus_value = map[string]int32{ + "JOB_STATUS_INVALID": 0, + "JOB_STATUS_PENDING": 1, + "JOB_STATUS_RUNNING": 2, + "JOB_STATUS_DONE": 3, +} + +func (x JobStatus) String() string { + return proto.EnumName(JobStatus_name, int32(x)) +} + +func (JobStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_0c1ba93cf29a8d9d, []int{2} +} + +type DataFormat int32 + +const ( + DataFormat_DATA_FORMAT_INVALID DataFormat = 0 + DataFormat_DATA_FORMAT_AVRO DataFormat = 1 +) + +var DataFormat_name = map[int32]string{ + 0: "DATA_FORMAT_INVALID", + 1: "DATA_FORMAT_AVRO", +} + +var DataFormat_value = map[string]int32{ + "DATA_FORMAT_INVALID": 0, + "DATA_FORMAT_AVRO": 1, +} + +func (x DataFormat) String() string { + return proto.EnumName(DataFormat_name, 
int32(x)) +} + +func (DataFormat) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_0c1ba93cf29a8d9d, []int{3} +} + +type GetFeastServingInfoRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFeastServingInfoRequest) Reset() { *m = GetFeastServingInfoRequest{} } +func (m *GetFeastServingInfoRequest) String() string { return proto.CompactTextString(m) } +func (*GetFeastServingInfoRequest) ProtoMessage() {} +func (*GetFeastServingInfoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_0c1ba93cf29a8d9d, []int{0} +} + +func (m *GetFeastServingInfoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFeastServingInfoRequest.Unmarshal(m, b) +} +func (m *GetFeastServingInfoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFeastServingInfoRequest.Marshal(b, m, deterministic) +} +func (m *GetFeastServingInfoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFeastServingInfoRequest.Merge(m, src) +} +func (m *GetFeastServingInfoRequest) XXX_Size() int { + return xxx_messageInfo_GetFeastServingInfoRequest.Size(m) +} +func (m *GetFeastServingInfoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetFeastServingInfoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFeastServingInfoRequest proto.InternalMessageInfo + +type GetFeastServingInfoResponse struct { + // Feast version of this serving deployment. + Version string `protobuf:"bytes,1,opt,name=version,proto3" json:"version,omitempty"` + // Type of serving deployment, either ONLINE or BATCH. Different store types support different + // feature retrieval methods. + Type FeastServingType `protobuf:"varint,2,opt,name=type,proto3,enum=feast.serving.FeastServingType" json:"type,omitempty"` + // Note: Batch specific options start from 10. + // Staging location for this serving store, if any. 
+ JobStagingLocation string `protobuf:"bytes,10,opt,name=job_staging_location,json=jobStagingLocation,proto3" json:"job_staging_location,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFeastServingInfoResponse) Reset() { *m = GetFeastServingInfoResponse{} } +func (m *GetFeastServingInfoResponse) String() string { return proto.CompactTextString(m) } +func (*GetFeastServingInfoResponse) ProtoMessage() {} +func (*GetFeastServingInfoResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_0c1ba93cf29a8d9d, []int{1} +} + +func (m *GetFeastServingInfoResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFeastServingInfoResponse.Unmarshal(m, b) +} +func (m *GetFeastServingInfoResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFeastServingInfoResponse.Marshal(b, m, deterministic) +} +func (m *GetFeastServingInfoResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFeastServingInfoResponse.Merge(m, src) +} +func (m *GetFeastServingInfoResponse) XXX_Size() int { + return xxx_messageInfo_GetFeastServingInfoResponse.Size(m) +} +func (m *GetFeastServingInfoResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetFeastServingInfoResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFeastServingInfoResponse proto.InternalMessageInfo + +func (m *GetFeastServingInfoResponse) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *GetFeastServingInfoResponse) GetType() FeastServingType { + if m != nil { + return m.Type + } + return FeastServingType_FEAST_SERVING_TYPE_INVALID +} + +func (m *GetFeastServingInfoResponse) GetJobStagingLocation() string { + if m != nil { + return m.JobStagingLocation + } + return "" +} + +type FeatureSetRequest struct { + // Feature set name + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Feature set version + Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + // Features that should be retrieved from this feature set + FeatureNames []string `protobuf:"bytes,3,rep,name=feature_names,json=featureNames,proto3" json:"feature_names,omitempty"` + // The features will be retrieved if: + // entity_timestamp - max_age <= event_timestamp <= entity_timestamp + // + // If unspecified the default max_age specified in FeatureSetSpec will + // be used. 
+ MaxAge *duration.Duration `protobuf:"bytes,4,opt,name=max_age,json=maxAge,proto3" json:"max_age,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeatureSetRequest) Reset() { *m = FeatureSetRequest{} } +func (m *FeatureSetRequest) String() string { return proto.CompactTextString(m) } +func (*FeatureSetRequest) ProtoMessage() {} +func (*FeatureSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_0c1ba93cf29a8d9d, []int{2} +} + +func (m *FeatureSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeatureSetRequest.Unmarshal(m, b) +} +func (m *FeatureSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeatureSetRequest.Marshal(b, m, deterministic) +} +func (m *FeatureSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeatureSetRequest.Merge(m, src) +} +func (m *FeatureSetRequest) XXX_Size() int { + return xxx_messageInfo_FeatureSetRequest.Size(m) +} +func (m *FeatureSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_FeatureSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_FeatureSetRequest proto.InternalMessageInfo + +func (m *FeatureSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *FeatureSetRequest) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *FeatureSetRequest) GetFeatureNames() []string { + if m != nil { + return m.FeatureNames + } + return nil +} + +func (m *FeatureSetRequest) GetMaxAge() *duration.Duration { + if m != nil { + return m.MaxAge + } + return nil +} + +type GetOnlineFeaturesRequest struct { + // List of feature sets and their features that are being retrieved + FeatureSets []*FeatureSetRequest `protobuf:"bytes,1,rep,name=feature_sets,json=featureSets,proto3" json:"feature_sets,omitempty"` + // List of entity rows, containing entity id and timestamp data. + // Used during retrieval of feature rows and for joining feature + // rows into a final dataset + EntityRows []*GetOnlineFeaturesRequest_EntityRow `protobuf:"bytes,2,rep,name=entity_rows,json=entityRows,proto3" json:"entity_rows,omitempty"` + // Option to omit entities from the response. If true, only feature + // values will be returned. 
+ OmitEntitiesInResponse bool `protobuf:"varint,3,opt,name=omit_entities_in_response,json=omitEntitiesInResponse,proto3" json:"omit_entities_in_response,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetOnlineFeaturesRequest) Reset() { *m = GetOnlineFeaturesRequest{} } +func (m *GetOnlineFeaturesRequest) String() string { return proto.CompactTextString(m) } +func (*GetOnlineFeaturesRequest) ProtoMessage() {} +func (*GetOnlineFeaturesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_0c1ba93cf29a8d9d, []int{3} +} + +func (m *GetOnlineFeaturesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetOnlineFeaturesRequest.Unmarshal(m, b) +} +func (m *GetOnlineFeaturesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetOnlineFeaturesRequest.Marshal(b, m, deterministic) +} +func (m *GetOnlineFeaturesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetOnlineFeaturesRequest.Merge(m, src) +} +func (m *GetOnlineFeaturesRequest) XXX_Size() int { + return xxx_messageInfo_GetOnlineFeaturesRequest.Size(m) +} +func (m *GetOnlineFeaturesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetOnlineFeaturesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetOnlineFeaturesRequest proto.InternalMessageInfo + +func (m *GetOnlineFeaturesRequest) GetFeatureSets() []*FeatureSetRequest { + if m != nil { + return m.FeatureSets + } + return nil +} + +func (m *GetOnlineFeaturesRequest) GetEntityRows() []*GetOnlineFeaturesRequest_EntityRow { + if m != nil { + return m.EntityRows + } + return nil +} + +func (m *GetOnlineFeaturesRequest) GetOmitEntitiesInResponse() bool { + if m != nil { + return m.OmitEntitiesInResponse + } + return false +} + +type GetOnlineFeaturesRequest_EntityRow struct { + // Request timestamp of this row. This value will be used, together with maxAge, + // to determine feature staleness. + EntityTimestamp *timestamp.Timestamp `protobuf:"bytes,1,opt,name=entity_timestamp,json=entityTimestamp,proto3" json:"entity_timestamp,omitempty"` + // Map containing mapping of entity name to entity value. 
+ Fields map[string]*types.Value `protobuf:"bytes,2,rep,name=fields,proto3" json:"fields,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetOnlineFeaturesRequest_EntityRow) Reset() { *m = GetOnlineFeaturesRequest_EntityRow{} } +func (m *GetOnlineFeaturesRequest_EntityRow) String() string { return proto.CompactTextString(m) } +func (*GetOnlineFeaturesRequest_EntityRow) ProtoMessage() {} +func (*GetOnlineFeaturesRequest_EntityRow) Descriptor() ([]byte, []int) { + return fileDescriptor_0c1ba93cf29a8d9d, []int{3, 0} +} + +func (m *GetOnlineFeaturesRequest_EntityRow) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetOnlineFeaturesRequest_EntityRow.Unmarshal(m, b) +} +func (m *GetOnlineFeaturesRequest_EntityRow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetOnlineFeaturesRequest_EntityRow.Marshal(b, m, deterministic) +} +func (m *GetOnlineFeaturesRequest_EntityRow) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetOnlineFeaturesRequest_EntityRow.Merge(m, src) +} +func (m *GetOnlineFeaturesRequest_EntityRow) XXX_Size() int { + return xxx_messageInfo_GetOnlineFeaturesRequest_EntityRow.Size(m) +} +func (m *GetOnlineFeaturesRequest_EntityRow) XXX_DiscardUnknown() { + xxx_messageInfo_GetOnlineFeaturesRequest_EntityRow.DiscardUnknown(m) +} + +var xxx_messageInfo_GetOnlineFeaturesRequest_EntityRow proto.InternalMessageInfo + +func (m *GetOnlineFeaturesRequest_EntityRow) GetEntityTimestamp() *timestamp.Timestamp { + if m != nil { + return m.EntityTimestamp + } + return nil +} + +func (m *GetOnlineFeaturesRequest_EntityRow) GetFields() map[string]*types.Value { + if m != nil { + return m.Fields + } + return nil +} + +type GetBatchFeaturesRequest struct { + // List of feature sets and their features that are being retrieved. + FeatureSets []*FeatureSetRequest `protobuf:"bytes,1,rep,name=feature_sets,json=featureSets,proto3" json:"feature_sets,omitempty"` + // Source of the entity dataset containing the timestamps and entity keys to retrieve + // features for. 
+ DatasetSource *DatasetSource `protobuf:"bytes,2,opt,name=dataset_source,json=datasetSource,proto3" json:"dataset_source,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetBatchFeaturesRequest) Reset() { *m = GetBatchFeaturesRequest{} } +func (m *GetBatchFeaturesRequest) String() string { return proto.CompactTextString(m) } +func (*GetBatchFeaturesRequest) ProtoMessage() {} +func (*GetBatchFeaturesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_0c1ba93cf29a8d9d, []int{4} +} + +func (m *GetBatchFeaturesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetBatchFeaturesRequest.Unmarshal(m, b) +} +func (m *GetBatchFeaturesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetBatchFeaturesRequest.Marshal(b, m, deterministic) +} +func (m *GetBatchFeaturesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetBatchFeaturesRequest.Merge(m, src) +} +func (m *GetBatchFeaturesRequest) XXX_Size() int { + return xxx_messageInfo_GetBatchFeaturesRequest.Size(m) +} +func (m *GetBatchFeaturesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetBatchFeaturesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetBatchFeaturesRequest proto.InternalMessageInfo + +func (m *GetBatchFeaturesRequest) GetFeatureSets() []*FeatureSetRequest { + if m != nil { + return m.FeatureSets + } + return nil +} + +func (m *GetBatchFeaturesRequest) GetDatasetSource() *DatasetSource { + if m != nil { + return m.DatasetSource + } + return nil +} + +type GetOnlineFeaturesResponse struct { + // Feature values retrieved from feast. + FieldValues []*GetOnlineFeaturesResponse_FieldValues `protobuf:"bytes,1,rep,name=field_values,json=fieldValues,proto3" json:"field_values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetOnlineFeaturesResponse) Reset() { *m = GetOnlineFeaturesResponse{} } +func (m *GetOnlineFeaturesResponse) String() string { return proto.CompactTextString(m) } +func (*GetOnlineFeaturesResponse) ProtoMessage() {} +func (*GetOnlineFeaturesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_0c1ba93cf29a8d9d, []int{5} +} + +func (m *GetOnlineFeaturesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetOnlineFeaturesResponse.Unmarshal(m, b) +} +func (m *GetOnlineFeaturesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetOnlineFeaturesResponse.Marshal(b, m, deterministic) +} +func (m *GetOnlineFeaturesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetOnlineFeaturesResponse.Merge(m, src) +} +func (m *GetOnlineFeaturesResponse) XXX_Size() int { + return xxx_messageInfo_GetOnlineFeaturesResponse.Size(m) +} +func (m *GetOnlineFeaturesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetOnlineFeaturesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetOnlineFeaturesResponse proto.InternalMessageInfo + +func (m *GetOnlineFeaturesResponse) GetFieldValues() []*GetOnlineFeaturesResponse_FieldValues { + if m != nil { + return m.FieldValues + } + return nil +} + +type GetOnlineFeaturesResponse_FieldValues struct { + // Map of feature or entity name to feature/entity values. + // Timestamps are not returned in this response. 
+ Fields map[string]*types.Value `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetOnlineFeaturesResponse_FieldValues) Reset() { *m = GetOnlineFeaturesResponse_FieldValues{} } +func (m *GetOnlineFeaturesResponse_FieldValues) String() string { return proto.CompactTextString(m) } +func (*GetOnlineFeaturesResponse_FieldValues) ProtoMessage() {} +func (*GetOnlineFeaturesResponse_FieldValues) Descriptor() ([]byte, []int) { + return fileDescriptor_0c1ba93cf29a8d9d, []int{5, 0} +} + +func (m *GetOnlineFeaturesResponse_FieldValues) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetOnlineFeaturesResponse_FieldValues.Unmarshal(m, b) +} +func (m *GetOnlineFeaturesResponse_FieldValues) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetOnlineFeaturesResponse_FieldValues.Marshal(b, m, deterministic) +} +func (m *GetOnlineFeaturesResponse_FieldValues) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetOnlineFeaturesResponse_FieldValues.Merge(m, src) +} +func (m *GetOnlineFeaturesResponse_FieldValues) XXX_Size() int { + return xxx_messageInfo_GetOnlineFeaturesResponse_FieldValues.Size(m) +} +func (m *GetOnlineFeaturesResponse_FieldValues) XXX_DiscardUnknown() { + xxx_messageInfo_GetOnlineFeaturesResponse_FieldValues.DiscardUnknown(m) +} + +var xxx_messageInfo_GetOnlineFeaturesResponse_FieldValues proto.InternalMessageInfo + +func (m *GetOnlineFeaturesResponse_FieldValues) GetFields() map[string]*types.Value { + if m != nil { + return m.Fields + } + return nil +} + +type GetBatchFeaturesResponse struct { + Job *Job `protobuf:"bytes,1,opt,name=job,proto3" json:"job,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetBatchFeaturesResponse) Reset() { *m = GetBatchFeaturesResponse{} } +func (m *GetBatchFeaturesResponse) String() string { return proto.CompactTextString(m) } +func (*GetBatchFeaturesResponse) ProtoMessage() {} +func (*GetBatchFeaturesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_0c1ba93cf29a8d9d, []int{6} +} + +func (m *GetBatchFeaturesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetBatchFeaturesResponse.Unmarshal(m, b) +} +func (m *GetBatchFeaturesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetBatchFeaturesResponse.Marshal(b, m, deterministic) +} +func (m *GetBatchFeaturesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetBatchFeaturesResponse.Merge(m, src) +} +func (m *GetBatchFeaturesResponse) XXX_Size() int { + return xxx_messageInfo_GetBatchFeaturesResponse.Size(m) +} +func (m *GetBatchFeaturesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetBatchFeaturesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetBatchFeaturesResponse proto.InternalMessageInfo + +func (m *GetBatchFeaturesResponse) GetJob() *Job { + if m != nil { + return m.Job + } + return nil +} + +type GetJobRequest struct { + Job *Job `protobuf:"bytes,1,opt,name=job,proto3" json:"job,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetJobRequest) Reset() { *m = GetJobRequest{} } +func (m *GetJobRequest) String() string { return proto.CompactTextString(m) } 
+func (*GetJobRequest) ProtoMessage() {} +func (*GetJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_0c1ba93cf29a8d9d, []int{7} +} + +func (m *GetJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetJobRequest.Unmarshal(m, b) +} +func (m *GetJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetJobRequest.Marshal(b, m, deterministic) +} +func (m *GetJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetJobRequest.Merge(m, src) +} +func (m *GetJobRequest) XXX_Size() int { + return xxx_messageInfo_GetJobRequest.Size(m) +} +func (m *GetJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetJobRequest proto.InternalMessageInfo + +func (m *GetJobRequest) GetJob() *Job { + if m != nil { + return m.Job + } + return nil +} + +type GetJobResponse struct { + Job *Job `protobuf:"bytes,1,opt,name=job,proto3" json:"job,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetJobResponse) Reset() { *m = GetJobResponse{} } +func (m *GetJobResponse) String() string { return proto.CompactTextString(m) } +func (*GetJobResponse) ProtoMessage() {} +func (*GetJobResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_0c1ba93cf29a8d9d, []int{8} +} + +func (m *GetJobResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetJobResponse.Unmarshal(m, b) +} +func (m *GetJobResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetJobResponse.Marshal(b, m, deterministic) +} +func (m *GetJobResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetJobResponse.Merge(m, src) +} +func (m *GetJobResponse) XXX_Size() int { + return xxx_messageInfo_GetJobResponse.Size(m) +} +func (m *GetJobResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetJobResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetJobResponse proto.InternalMessageInfo + +func (m *GetJobResponse) GetJob() *Job { + if m != nil { + return m.Job + } + return nil +} + +type Job struct { + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // Output only. The type of the job. + Type JobType `protobuf:"varint,2,opt,name=type,proto3,enum=feast.serving.JobType" json:"type,omitempty"` + // Output only. Current state of the job. + Status JobStatus `protobuf:"varint,3,opt,name=status,proto3,enum=feast.serving.JobStatus" json:"status,omitempty"` + // Output only. If not empty, the job has failed with this error message. + Error string `protobuf:"bytes,4,opt,name=error,proto3" json:"error,omitempty"` + // Output only. The list of URIs for the files to be downloaded or + // uploaded (depends on the job type) for this particular job. + FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"` + // Output only. The data format for all the files. + // For CSV format, the files contain both feature values and a column header. 
+ DataFormat DataFormat `protobuf:"varint,6,opt,name=data_format,json=dataFormat,proto3,enum=feast.serving.DataFormat" json:"data_format,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Job) Reset() { *m = Job{} } +func (m *Job) String() string { return proto.CompactTextString(m) } +func (*Job) ProtoMessage() {} +func (*Job) Descriptor() ([]byte, []int) { + return fileDescriptor_0c1ba93cf29a8d9d, []int{9} +} + +func (m *Job) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Job.Unmarshal(m, b) +} +func (m *Job) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Job.Marshal(b, m, deterministic) +} +func (m *Job) XXX_Merge(src proto.Message) { + xxx_messageInfo_Job.Merge(m, src) +} +func (m *Job) XXX_Size() int { + return xxx_messageInfo_Job.Size(m) +} +func (m *Job) XXX_DiscardUnknown() { + xxx_messageInfo_Job.DiscardUnknown(m) +} + +var xxx_messageInfo_Job proto.InternalMessageInfo + +func (m *Job) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Job) GetType() JobType { + if m != nil { + return m.Type + } + return JobType_JOB_TYPE_INVALID +} + +func (m *Job) GetStatus() JobStatus { + if m != nil { + return m.Status + } + return JobStatus_JOB_STATUS_INVALID +} + +func (m *Job) GetError() string { + if m != nil { + return m.Error + } + return "" +} + +func (m *Job) GetFileUris() []string { + if m != nil { + return m.FileUris + } + return nil +} + +func (m *Job) GetDataFormat() DataFormat { + if m != nil { + return m.DataFormat + } + return DataFormat_DATA_FORMAT_INVALID +} + +type DatasetSource struct { + // Types that are valid to be assigned to DatasetSource: + // *DatasetSource_FileSource_ + DatasetSource isDatasetSource_DatasetSource `protobuf_oneof:"dataset_source"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DatasetSource) Reset() { *m = DatasetSource{} } +func (m *DatasetSource) String() string { return proto.CompactTextString(m) } +func (*DatasetSource) ProtoMessage() {} +func (*DatasetSource) Descriptor() ([]byte, []int) { + return fileDescriptor_0c1ba93cf29a8d9d, []int{10} +} + +func (m *DatasetSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DatasetSource.Unmarshal(m, b) +} +func (m *DatasetSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DatasetSource.Marshal(b, m, deterministic) +} +func (m *DatasetSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_DatasetSource.Merge(m, src) +} +func (m *DatasetSource) XXX_Size() int { + return xxx_messageInfo_DatasetSource.Size(m) +} +func (m *DatasetSource) XXX_DiscardUnknown() { + xxx_messageInfo_DatasetSource.DiscardUnknown(m) +} + +var xxx_messageInfo_DatasetSource proto.InternalMessageInfo + +type isDatasetSource_DatasetSource interface { + isDatasetSource_DatasetSource() +} + +type DatasetSource_FileSource_ struct { + FileSource *DatasetSource_FileSource `protobuf:"bytes,1,opt,name=file_source,json=fileSource,proto3,oneof"` +} + +func (*DatasetSource_FileSource_) isDatasetSource_DatasetSource() {} + +func (m *DatasetSource) GetDatasetSource() isDatasetSource_DatasetSource { + if m != nil { + return m.DatasetSource + } + return nil +} + +func (m *DatasetSource) GetFileSource() *DatasetSource_FileSource { + if x, ok := m.GetDatasetSource().(*DatasetSource_FileSource_); ok { + return x.FileSource + } + return nil +} + +// 
XXX_OneofWrappers is for the internal use of the proto package. +func (*DatasetSource) XXX_OneofWrappers() []interface{} { + return []interface{}{ + (*DatasetSource_FileSource_)(nil), + } +} + +type DatasetSource_FileSource struct { + // URIs to retrieve the dataset from, e.g. gs://bucket/directory/object.csv. Wildcards are + // supported. This data must be compatible to be uploaded to the serving store, and also be + // accessible by this serving instance. + FileUris []string `protobuf:"bytes,1,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"` + // Format of the data. Currently only avro is supported. + DataFormat DataFormat `protobuf:"varint,2,opt,name=data_format,json=dataFormat,proto3,enum=feast.serving.DataFormat" json:"data_format,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DatasetSource_FileSource) Reset() { *m = DatasetSource_FileSource{} } +func (m *DatasetSource_FileSource) String() string { return proto.CompactTextString(m) } +func (*DatasetSource_FileSource) ProtoMessage() {} +func (*DatasetSource_FileSource) Descriptor() ([]byte, []int) { + return fileDescriptor_0c1ba93cf29a8d9d, []int{10, 0} +} + +func (m *DatasetSource_FileSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DatasetSource_FileSource.Unmarshal(m, b) +} +func (m *DatasetSource_FileSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DatasetSource_FileSource.Marshal(b, m, deterministic) +} +func (m *DatasetSource_FileSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_DatasetSource_FileSource.Merge(m, src) +} +func (m *DatasetSource_FileSource) XXX_Size() int { + return xxx_messageInfo_DatasetSource_FileSource.Size(m) +} +func (m *DatasetSource_FileSource) XXX_DiscardUnknown() { + xxx_messageInfo_DatasetSource_FileSource.DiscardUnknown(m) +} + +var xxx_messageInfo_DatasetSource_FileSource proto.InternalMessageInfo + +func (m *DatasetSource_FileSource) GetFileUris() []string { + if m != nil { + return m.FileUris + } + return nil +} + +func (m *DatasetSource_FileSource) GetDataFormat() DataFormat { + if m != nil { + return m.DataFormat + } + return DataFormat_DATA_FORMAT_INVALID +} + +func init() { + proto.RegisterEnum("feast.serving.FeastServingType", FeastServingType_name, FeastServingType_value) + proto.RegisterEnum("feast.serving.JobType", JobType_name, JobType_value) + proto.RegisterEnum("feast.serving.JobStatus", JobStatus_name, JobStatus_value) + proto.RegisterEnum("feast.serving.DataFormat", DataFormat_name, DataFormat_value) + proto.RegisterType((*GetFeastServingInfoRequest)(nil), "feast.serving.GetFeastServingInfoRequest") + proto.RegisterType((*GetFeastServingInfoResponse)(nil), "feast.serving.GetFeastServingInfoResponse") + proto.RegisterType((*FeatureSetRequest)(nil), "feast.serving.FeatureSetRequest") + proto.RegisterType((*GetOnlineFeaturesRequest)(nil), "feast.serving.GetOnlineFeaturesRequest") + proto.RegisterType((*GetOnlineFeaturesRequest_EntityRow)(nil), "feast.serving.GetOnlineFeaturesRequest.EntityRow") + proto.RegisterMapType((map[string]*types.Value)(nil), "feast.serving.GetOnlineFeaturesRequest.EntityRow.FieldsEntry") + proto.RegisterType((*GetBatchFeaturesRequest)(nil), "feast.serving.GetBatchFeaturesRequest") + proto.RegisterType((*GetOnlineFeaturesResponse)(nil), "feast.serving.GetOnlineFeaturesResponse") + proto.RegisterType((*GetOnlineFeaturesResponse_FieldValues)(nil), 
"feast.serving.GetOnlineFeaturesResponse.FieldValues") + proto.RegisterMapType((map[string]*types.Value)(nil), "feast.serving.GetOnlineFeaturesResponse.FieldValues.FieldsEntry") + proto.RegisterType((*GetBatchFeaturesResponse)(nil), "feast.serving.GetBatchFeaturesResponse") + proto.RegisterType((*GetJobRequest)(nil), "feast.serving.GetJobRequest") + proto.RegisterType((*GetJobResponse)(nil), "feast.serving.GetJobResponse") + proto.RegisterType((*Job)(nil), "feast.serving.Job") + proto.RegisterType((*DatasetSource)(nil), "feast.serving.DatasetSource") + proto.RegisterType((*DatasetSource_FileSource)(nil), "feast.serving.DatasetSource.FileSource") +} + +func init() { proto.RegisterFile("feast/serving/ServingService.proto", fileDescriptor_0c1ba93cf29a8d9d) } + +var fileDescriptor_0c1ba93cf29a8d9d = []byte{ + // 1105 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0x4f, 0x73, 0xda, 0x46, + 0x14, 0x8f, 0xc0, 0xc6, 0xe1, 0x11, 0x63, 0x79, 0xed, 0xda, 0xb2, 0xe2, 0x24, 0x0c, 0xed, 0xd4, + 0x94, 0x83, 0x68, 0x49, 0x9b, 0x69, 0xd3, 0xe9, 0x4c, 0xc0, 0x08, 0x82, 0xc7, 0x11, 0x9e, 0x05, + 0x3b, 0x6d, 0x2f, 0x1a, 0x01, 0x0b, 0x96, 0x0d, 0x5a, 0x57, 0xbb, 0x38, 0xf1, 0xd7, 0xe8, 0xb1, + 0x87, 0x5e, 0x7a, 0xee, 0xb5, 0x9f, 0xa4, 0x9f, 0xa0, 0xb7, 0x7e, 0x83, 0x1e, 0x3b, 0x5a, 0xad, + 0x30, 0xff, 0x9c, 0xd8, 0x9d, 0x69, 0x4e, 0xda, 0x7d, 0xef, 0xf7, 0xfe, 0xee, 0x7b, 0x4f, 0x0f, + 0xb2, 0x3d, 0xe2, 0x30, 0x5e, 0x60, 0xc4, 0xbf, 0x74, 0xbd, 0x7e, 0xa1, 0x19, 0x7e, 0xc5, 0xa7, + 0x43, 0x8c, 0x0b, 0x9f, 0x72, 0x8a, 0x56, 0x05, 0xc6, 0x90, 0x18, 0xfd, 0x49, 0x9f, 0xd2, 0xfe, + 0x80, 0x14, 0x04, 0xb3, 0x3d, 0xea, 0x15, 0xb8, 0x3b, 0x24, 0x8c, 0x3b, 0xc3, 0x8b, 0x10, 0xaf, + 0x3f, 0x9e, 0x05, 0x74, 0x47, 0xbe, 0xc3, 0x5d, 0xea, 0x49, 0xfe, 0x76, 0x68, 0x93, 0x5f, 0x5d, + 0x10, 0x56, 0x38, 0x71, 0x06, 0x23, 0x69, 0x28, 0xbb, 0x0b, 0x7a, 0x8d, 0xf0, 0x6a, 0xc0, 0x95, + 0x8e, 0xd4, 0xbd, 0x1e, 0xc5, 0xe4, 0xa7, 0x11, 0x61, 0x3c, 0xfb, 0xab, 0x02, 0x0f, 0x17, 0xb2, + 0xd9, 0x05, 0xf5, 0x18, 0x41, 0x1a, 0xac, 0x5c, 0x12, 0x9f, 0xb9, 0xd4, 0xd3, 0x94, 0x8c, 0x92, + 0x4b, 0xe2, 0xe8, 0x8a, 0x9e, 0xc2, 0x52, 0x60, 0x4c, 0x8b, 0x65, 0x94, 0x5c, 0xba, 0xf8, 0xc4, + 0x98, 0x8a, 0xc7, 0x98, 0x54, 0xd8, 0xba, 0xba, 0x20, 0x58, 0x80, 0xd1, 0xe7, 0xb0, 0x79, 0x46, + 0xdb, 0x36, 0xe3, 0x4e, 0xdf, 0xf5, 0xfa, 0xf6, 0x80, 0x76, 0x44, 0x0c, 0x1a, 0x08, 0xdd, 0xe8, + 0x8c, 0xb6, 0x9b, 0x21, 0xeb, 0x50, 0x72, 0xb2, 0xbf, 0x28, 0xb0, 0x5e, 0x25, 0x0e, 0x1f, 0xf9, + 0xa4, 0x49, 0xb8, 0x74, 0x1b, 0x21, 0x58, 0xf2, 0x9c, 0x21, 0x91, 0x3e, 0x89, 0xf3, 0xa4, 0xab, + 0x81, 0x4f, 0xcb, 0xd7, 0xae, 0x7e, 0x0c, 0x41, 0xb6, 0x03, 0x15, 0x76, 0x80, 0x64, 0x5a, 0x3c, + 0x13, 0xcf, 0x25, 0xf1, 0x03, 0x49, 0xb4, 0x02, 0x1a, 0x2a, 0xc2, 0xca, 0xd0, 0x79, 0x6b, 0x3b, + 0x7d, 0xa2, 0x2d, 0x65, 0x94, 0x5c, 0xaa, 0xb8, 0x63, 0x84, 0x29, 0x37, 0xa2, 0x94, 0x1b, 0x15, + 0x99, 0x72, 0x9c, 0x18, 0x3a, 0x6f, 0x4b, 0x7d, 0x92, 0xfd, 0x3b, 0x0e, 0x5a, 0x8d, 0xf0, 0x86, + 0x37, 0x70, 0x3d, 0x22, 0xbd, 0x64, 0x91, 0x8f, 0xfb, 0x10, 0x19, 0xb0, 0x19, 0xe1, 0x4c, 0x53, + 0x32, 0xf1, 0x5c, 0xaa, 0x98, 0x99, 0x4f, 0xd4, 0x74, 0x6c, 0x38, 0xd5, 0x1b, 0x93, 0x18, 0xc2, + 0x90, 0x22, 0x1e, 0x77, 0xf9, 0x95, 0xed, 0xd3, 0x37, 0x4c, 0x8b, 0x09, 0x1d, 0x5f, 0xcc, 0xe8, + 0xb8, 0xc9, 0x05, 0xc3, 0x14, 0xa2, 0x98, 0xbe, 0xc1, 0x40, 0xa2, 0x23, 0x43, 0xdf, 0xc0, 0x0e, + 0x1d, 0xba, 0xdc, 0x16, 0x24, 0x97, 0x30, 0xdb, 0xf5, 0x6c, 0x5f, 0x3e, 0xb8, 0x16, 0xcf, 0x28, + 0xb9, 0xfb, 0x78, 0x2b, 0x00, 0x98, 0x92, 
0x5f, 0xf7, 0xa2, 0x72, 0xd0, 0xff, 0x51, 0x20, 0x39, + 0x56, 0x8a, 0x4c, 0x50, 0xa5, 0x73, 0xe3, 0x6a, 0x15, 0x2f, 0x92, 0x2a, 0xea, 0x73, 0xb9, 0x6b, + 0x45, 0x08, 0xbc, 0x16, 0xca, 0x8c, 0x09, 0xe8, 0x18, 0x12, 0x3d, 0x97, 0x0c, 0xba, 0x51, 0x78, + 0xdf, 0xdd, 0x39, 0x3c, 0xa3, 0x2a, 0xe4, 0x4d, 0x8f, 0xfb, 0x57, 0x58, 0x2a, 0xd3, 0x5f, 0x41, + 0x6a, 0x82, 0x8c, 0x54, 0x88, 0x9f, 0x93, 0x2b, 0x59, 0x31, 0xc1, 0x11, 0xe5, 0x60, 0xf9, 0x32, + 0x68, 0x14, 0x51, 0x2e, 0xa9, 0x22, 0x92, 0x66, 0x45, 0x0b, 0x19, 0xa2, 0x85, 0x70, 0x08, 0x78, + 0x1e, 0xfb, 0x5a, 0xc9, 0xfe, 0xa6, 0xc0, 0x76, 0x8d, 0xf0, 0xb2, 0xc3, 0x3b, 0xa7, 0xff, 0xcb, + 0x53, 0xef, 0x43, 0xba, 0xeb, 0x70, 0x87, 0x11, 0x6e, 0x33, 0x3a, 0xf2, 0x3b, 0x91, 0x5f, 0xbb, + 0x33, 0x6a, 0x2a, 0x21, 0xa8, 0x29, 0x30, 0x78, 0xb5, 0x3b, 0x79, 0xcd, 0xfe, 0x1e, 0x83, 0x9d, + 0x05, 0xf9, 0x92, 0xdd, 0xfc, 0x1a, 0x1e, 0x88, 0xe4, 0xd8, 0x22, 0xac, 0xc8, 0xcf, 0x2f, 0xdf, + 0x9f, 0xef, 0x50, 0x3e, 0x4c, 0xb3, 0xc8, 0x0c, 0xc3, 0xa9, 0xde, 0xf5, 0x45, 0xff, 0x43, 0x91, + 0xc9, 0x0e, 0xef, 0xe8, 0xfb, 0xf1, 0x93, 0x86, 0x26, 0x5e, 0xfc, 0x17, 0x13, 0x1f, 0xe2, 0x55, + 0x5f, 0x88, 0x06, 0x9e, 0x79, 0x54, 0x99, 0xad, 0x4f, 0x20, 0x7e, 0x46, 0xdb, 0xb2, 0xa2, 0xd1, + 0x4c, 0x04, 0x07, 0xb4, 0x8d, 0x03, 0x76, 0xf6, 0x2b, 0x58, 0xad, 0x11, 0x1e, 0x5c, 0x65, 0x31, + 0xdc, 0x4e, 0xec, 0x19, 0xa4, 0x23, 0xb1, 0x3b, 0x99, 0xfb, 0x4b, 0x81, 0xf8, 0x01, 0x6d, 0xa3, + 0x34, 0xc4, 0xdc, 0xae, 0x8c, 0x3b, 0xe6, 0x76, 0x51, 0x7e, 0x6a, 0x1c, 0x6f, 0xcd, 0x8b, 0x4f, + 0x4d, 0xe1, 0x04, 0xe3, 0x0e, 0x1f, 0x31, 0xd1, 0xed, 0xe9, 0xa2, 0x36, 0x8f, 0x6e, 0x0a, 0x3e, + 0x96, 0x38, 0xb4, 0x09, 0xcb, 0xc4, 0xf7, 0xa9, 0x2f, 0x46, 0x63, 0x12, 0x87, 0x17, 0xf4, 0x10, + 0x92, 0x3d, 0x77, 0x40, 0xec, 0x91, 0xef, 0x32, 0x6d, 0x59, 0xcc, 0xd4, 0xfb, 0x01, 0xe1, 0xd8, + 0x77, 0x19, 0x7a, 0x0e, 0xa9, 0xa0, 0x34, 0xed, 0x1e, 0xf5, 0x87, 0x0e, 0xd7, 0x12, 0xc2, 0xd2, + 0xce, 0x82, 0x5a, 0xae, 0x0a, 0x00, 0x86, 0xee, 0xf8, 0x9c, 0xfd, 0x53, 0x81, 0xd5, 0xa9, 0x32, + 0x47, 0x07, 0x90, 0x12, 0xa6, 0x64, 0x67, 0x84, 0x49, 0xda, 0x7b, 0x57, 0x67, 0x18, 0x55, 0x77, + 0x40, 0xc2, 0xe3, 0xcb, 0x7b, 0x18, 0x7a, 0xe3, 0x9b, 0x4e, 0x00, 0xae, 0x79, 0xd3, 0x41, 0x28, + 0xef, 0x0e, 0x22, 0x76, 0x87, 0x20, 0xca, 0xea, 0x6c, 0x3f, 0xe7, 0x29, 0xa8, 0xb3, 0xff, 0x45, + 0xf4, 0x18, 0xf4, 0xaa, 0x59, 0x6a, 0xb6, 0xec, 0xa6, 0x89, 0x4f, 0xea, 0x56, 0xcd, 0x6e, 0xfd, + 0x70, 0x64, 0xda, 0x75, 0xeb, 0xa4, 0x74, 0x58, 0xaf, 0xa8, 0xf7, 0xd0, 0x23, 0xd8, 0x59, 0xc0, + 0x6f, 0x58, 0x87, 0x75, 0xcb, 0x54, 0x15, 0xb4, 0x0b, 0xda, 0x02, 0x76, 0xb9, 0xd4, 0xda, 0x7f, + 0xa9, 0xc6, 0xf2, 0xcf, 0x60, 0x45, 0xbe, 0x3c, 0xda, 0x04, 0xf5, 0xa0, 0x51, 0x9e, 0xd5, 0xfe, + 0x11, 0xac, 0x8f, 0xa9, 0x95, 0xc6, 0x6b, 0xeb, 0xb0, 0x51, 0xaa, 0xa8, 0x4a, 0xfe, 0x14, 0x92, + 0xe3, 0x1a, 0x40, 0x5b, 0x80, 0x02, 0x4c, 0xb3, 0x55, 0x6a, 0x1d, 0x37, 0x27, 0x64, 0xa7, 0xe9, + 0x47, 0xa6, 0x55, 0xa9, 0x5b, 0x35, 0x55, 0x99, 0xa1, 0xe3, 0x63, 0xcb, 0x0a, 0xe8, 0x31, 0xb4, + 0x01, 0x6b, 0x13, 0xf4, 0x4a, 0xc3, 0x32, 0xd5, 0x78, 0xfe, 0x5b, 0x80, 0xeb, 0xf4, 0xa1, 0x6d, + 0xd8, 0xa8, 0x94, 0x5a, 0x25, 0xbb, 0xda, 0xc0, 0xaf, 0x4a, 0xad, 0x09, 0x5b, 0x9b, 0xa0, 0x4e, + 0x32, 0x4a, 0x27, 0xb8, 0xa1, 0x2a, 0xc5, 0x9f, 0xe3, 0x90, 0x9e, 0x5e, 0xae, 0xd0, 0x00, 0x36, + 0x16, 0xac, 0x33, 0xe8, 0xb3, 0xf9, 0xf9, 0x73, 0xc3, 0x46, 0xa4, 0xe7, 0x6f, 0x03, 0x95, 0x2d, + 0xdb, 0x83, 0xf5, 0xb9, 0x49, 0x86, 0xf6, 0x6e, 0xf9, 0xfb, 0xd2, 0x73, 0xb7, 0x1d, 0x8a, 0xa8, + 0x03, 0xea, 0xec, 0x94, 0x42, 0x9f, 0xce, 0x4b, 0x2f, 0xfa, 0x37, 
0xe9, 0x7b, 0xef, 0xc5, 0x49, + 0x23, 0x26, 0x24, 0xc2, 0x89, 0x84, 0x76, 0xe7, 0x45, 0xae, 0xe7, 0x9b, 0xfe, 0xe8, 0x06, 0x6e, + 0xa8, 0xa6, 0xdc, 0x82, 0xe9, 0xd5, 0xb6, 0xbc, 0x26, 0x33, 0x57, 0x3a, 0xaa, 0x1f, 0x05, 0xdb, + 0xc0, 0x8f, 0xc5, 0xbe, 0xcb, 0x4f, 0x47, 0x6d, 0xa3, 0x43, 0x87, 0x85, 0x3e, 0x3d, 0x23, 0xe7, + 0x05, 0xb9, 0x2f, 0x77, 0xcf, 0x0b, 0x7d, 0x1a, 0x6e, 0xb8, 0xac, 0x30, 0xb5, 0x43, 0xb7, 0x13, + 0x82, 0xfa, 0xf4, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0xa8, 0xc6, 0xa7, 0xc9, 0x5b, 0x0b, 0x00, + 0x00, +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ServingServiceClient is the client API for ServingService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ServingServiceClient interface { + // Get information about this Feast serving. + GetFeastServingInfo(ctx context.Context, in *GetFeastServingInfoRequest, opts ...grpc.CallOption) (*GetFeastServingInfoResponse, error) + // Get online features synchronously. + GetOnlineFeatures(ctx context.Context, in *GetOnlineFeaturesRequest, opts ...grpc.CallOption) (*GetOnlineFeaturesResponse, error) + // Get batch features asynchronously. + // + // The client should check the status of the returned job periodically by + // calling ReloadJob to determine if the job has completed successfully + // or with an error. If the job completes successfully i.e. + // status = JOB_STATUS_DONE with no error, then the client can check + // the file_uris for the location to download feature values data. + // The client is assumed to have access to these file URIs. + GetBatchFeatures(ctx context.Context, in *GetBatchFeaturesRequest, opts ...grpc.CallOption) (*GetBatchFeaturesResponse, error) + // Get the latest job status for batch feature retrieval. + GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*GetJobResponse, error) +} + +type servingServiceClient struct { + cc *grpc.ClientConn +} + +func NewServingServiceClient(cc *grpc.ClientConn) ServingServiceClient { + return &servingServiceClient{cc} +} + +func (c *servingServiceClient) GetFeastServingInfo(ctx context.Context, in *GetFeastServingInfoRequest, opts ...grpc.CallOption) (*GetFeastServingInfoResponse, error) { + out := new(GetFeastServingInfoResponse) + err := c.cc.Invoke(ctx, "/feast.serving.ServingService/GetFeastServingInfo", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *servingServiceClient) GetOnlineFeatures(ctx context.Context, in *GetOnlineFeaturesRequest, opts ...grpc.CallOption) (*GetOnlineFeaturesResponse, error) { + out := new(GetOnlineFeaturesResponse) + err := c.cc.Invoke(ctx, "/feast.serving.ServingService/GetOnlineFeatures", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *servingServiceClient) GetBatchFeatures(ctx context.Context, in *GetBatchFeaturesRequest, opts ...grpc.CallOption) (*GetBatchFeaturesResponse, error) { + out := new(GetBatchFeaturesResponse) + err := c.cc.Invoke(ctx, "/feast.serving.ServingService/GetBatchFeatures", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *servingServiceClient) GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*GetJobResponse, error) { + out := new(GetJobResponse) + err := c.cc.Invoke(ctx, "/feast.serving.ServingService/GetJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ServingServiceServer is the server API for ServingService service. +type ServingServiceServer interface { + // Get information about this Feast serving. + GetFeastServingInfo(context.Context, *GetFeastServingInfoRequest) (*GetFeastServingInfoResponse, error) + // Get online features synchronously. + GetOnlineFeatures(context.Context, *GetOnlineFeaturesRequest) (*GetOnlineFeaturesResponse, error) + // Get batch features asynchronously. + // + // The client should check the status of the returned job periodically by + // calling ReloadJob to determine if the job has completed successfully + // or with an error. If the job completes successfully i.e. + // status = JOB_STATUS_DONE with no error, then the client can check + // the file_uris for the location to download feature values data. + // The client is assumed to have access to these file URIs. + GetBatchFeatures(context.Context, *GetBatchFeaturesRequest) (*GetBatchFeaturesResponse, error) + // Get the latest job status for batch feature retrieval. + GetJob(context.Context, *GetJobRequest) (*GetJobResponse, error) +} + +// UnimplementedServingServiceServer can be embedded to have forward compatible implementations. +type UnimplementedServingServiceServer struct { +} + +func (*UnimplementedServingServiceServer) GetFeastServingInfo(ctx context.Context, req *GetFeastServingInfoRequest) (*GetFeastServingInfoResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetFeastServingInfo not implemented") +} +func (*UnimplementedServingServiceServer) GetOnlineFeatures(ctx context.Context, req *GetOnlineFeaturesRequest) (*GetOnlineFeaturesResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetOnlineFeatures not implemented") +} +func (*UnimplementedServingServiceServer) GetBatchFeatures(ctx context.Context, req *GetBatchFeaturesRequest) (*GetBatchFeaturesResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetBatchFeatures not implemented") +} +func (*UnimplementedServingServiceServer) GetJob(ctx context.Context, req *GetJobRequest) (*GetJobResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetJob not implemented") +} + +func RegisterServingServiceServer(s *grpc.Server, srv ServingServiceServer) { + s.RegisterService(&_ServingService_serviceDesc, srv) +} + +func _ServingService_GetFeastServingInfo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetFeastServingInfoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServingServiceServer).GetFeastServingInfo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/feast.serving.ServingService/GetFeastServingInfo", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServingServiceServer).GetFeastServingInfo(ctx, req.(*GetFeastServingInfoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServingService_GetOnlineFeatures_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor 
grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetOnlineFeaturesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServingServiceServer).GetOnlineFeatures(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/feast.serving.ServingService/GetOnlineFeatures", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServingServiceServer).GetOnlineFeatures(ctx, req.(*GetOnlineFeaturesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServingService_GetBatchFeatures_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetBatchFeaturesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServingServiceServer).GetBatchFeatures(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/feast.serving.ServingService/GetBatchFeatures", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServingServiceServer).GetBatchFeatures(ctx, req.(*GetBatchFeaturesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServingService_GetJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServingServiceServer).GetJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/feast.serving.ServingService/GetJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServingServiceServer).GetJob(ctx, req.(*GetJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ServingService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "feast.serving.ServingService", + HandlerType: (*ServingServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetFeastServingInfo", + Handler: _ServingService_GetFeastServingInfo_Handler, + }, + { + MethodName: "GetOnlineFeatures", + Handler: _ServingService_GetOnlineFeatures_Handler, + }, + { + MethodName: "GetBatchFeatures", + Handler: _ServingService_GetBatchFeatures_Handler, + }, + { + MethodName: "GetJob", + Handler: _ServingService_GetJob_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "feast/serving/ServingService.proto", +} diff --git a/sdk/go/protos/feast/storage/Redis.pb.go b/sdk/go/protos/feast/storage/Redis.pb.go new file mode 100644 index 00000000000..55cf42becd8 --- /dev/null +++ b/sdk/go/protos/feast/storage/Redis.pb.go @@ -0,0 +1,95 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: feast/storage/Redis.proto + +package storage + +import ( + fmt "fmt" + types "github.com/gojek/feast/sdk/go/protos/feast/types" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type RedisKey struct { + // FeatureSet this row belongs to, this is defined as featureSetName:version. + FeatureSet string `protobuf:"bytes,2,opt,name=feature_set,json=featureSet,proto3" json:"feature_set,omitempty"` + // List of fields containing entity names and their respective values + // contained within this feature row. + Entities []*types.Field `protobuf:"bytes,3,rep,name=entities,proto3" json:"entities,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RedisKey) Reset() { *m = RedisKey{} } +func (m *RedisKey) String() string { return proto.CompactTextString(m) } +func (*RedisKey) ProtoMessage() {} +func (*RedisKey) Descriptor() ([]byte, []int) { + return fileDescriptor_64e898a359fc9e5d, []int{0} +} + +func (m *RedisKey) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RedisKey.Unmarshal(m, b) +} +func (m *RedisKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RedisKey.Marshal(b, m, deterministic) +} +func (m *RedisKey) XXX_Merge(src proto.Message) { + xxx_messageInfo_RedisKey.Merge(m, src) +} +func (m *RedisKey) XXX_Size() int { + return xxx_messageInfo_RedisKey.Size(m) +} +func (m *RedisKey) XXX_DiscardUnknown() { + xxx_messageInfo_RedisKey.DiscardUnknown(m) +} + +var xxx_messageInfo_RedisKey proto.InternalMessageInfo + +func (m *RedisKey) GetFeatureSet() string { + if m != nil { + return m.FeatureSet + } + return "" +} + +func (m *RedisKey) GetEntities() []*types.Field { + if m != nil { + return m.Entities + } + return nil +} + +func init() { + proto.RegisterType((*RedisKey)(nil), "feast.storage.RedisKey") +} + +func init() { proto.RegisterFile("feast/storage/Redis.proto", fileDescriptor_64e898a359fc9e5d) } + +var fileDescriptor_64e898a359fc9e5d = []byte{ + // 193 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x8f, 0x31, 0xaf, 0x82, 0x40, + 0x10, 0x84, 0xf3, 0x1e, 0xc9, 0x0b, 0xef, 0x88, 0xcd, 0x35, 0xa2, 0x8d, 0xc4, 0x8a, 0xea, 0x36, + 0xc1, 0x7f, 0x40, 0x61, 0x63, 0xa1, 0xc1, 0x4e, 0x0b, 0x03, 0xb2, 0x9c, 0x27, 0xea, 0x11, 0x6e, + 0x29, 0xf8, 0xf7, 0xc6, 0x85, 0x98, 0xd0, 0xee, 0xcc, 0xce, 0x37, 0x23, 0x16, 0x15, 0xe6, 0x8e, + 0xc0, 0x91, 0x6d, 0x73, 0x8d, 0x90, 0x61, 0x69, 0x9c, 0x6a, 0x5a, 0x4b, 0x56, 0xce, 0x58, 0x52, + 0xa3, 0xb4, 0x9c, 0x0f, 0x4e, 0xea, 0x1b, 0x74, 0xb0, 0x35, 0xf8, 0x28, 0x07, 0xdf, 0xfa, 0x2c, + 0x7c, 0x7e, 0xdb, 0x61, 0x2f, 0x57, 0x22, 0xa8, 0x30, 0xa7, 0xae, 0xc5, 0x8b, 0x43, 0x0a, 0x7f, + 0xa3, 0x9f, 0xf8, 0x3f, 0x13, 0xe3, 0xe9, 0x88, 0x24, 0x95, 0xf0, 0xf1, 0x45, 0x86, 0x0c, 0xba, + 0xd0, 0x8b, 0xbc, 0x38, 0x48, 0xa4, 0x1a, 0x38, 0x1c, 0xac, 0x38, 0x38, 0xfb, 0x7a, 0xd2, 0xbd, + 0x98, 0xd6, 0x48, 0x05, 0xb3, 0x0e, 0x1f, 0xf2, 0x29, 0xd1, 0x86, 0x6e, 0x5d, 0xa1, 0xae, 0xf6, + 0x09, 0xda, 0xde, 0xb1, 0x86, 0x71, 0x4d, 0x59, 0x83, 0xb6, 0xc0, 0xf5, 0x1c, 0x4c, 0x16, 0x16, + 0x7f, 0x7c, 0xdd, 0xbc, 0x03, 0x00, 0x00, 0xff, 0xff, 0x68, 0x2b, 0x58, 0x50, 0xf9, 0x00, 0x00, + 0x00, +} diff --git a/sdk/go/protos/feast/types/FeatureRow.pb.go b/sdk/go/protos/feast/types/FeatureRow.pb.go new file mode 100644 index 00000000000..dd9aa6b8c96 --- /dev/null +++ b/sdk/go/protos/feast/types/FeatureRow.pb.go @@ -0,0 +1,109 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: feast/types/FeatureRow.proto + +package types + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + timestamp "github.com/golang/protobuf/ptypes/timestamp" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type FeatureRow struct { + // Fields in the feature row. + Fields []*Field `protobuf:"bytes,2,rep,name=fields,proto3" json:"fields,omitempty"` + // Timestamp of the feature row. While the actual definition of this timestamp may vary + // depending on the upstream feature creation pipelines, this is the timestamp that Feast + // will use to perform joins, determine latest values, and coalesce rows. + EventTimestamp *timestamp.Timestamp `protobuf:"bytes,3,opt,name=event_timestamp,json=eventTimestamp,proto3" json:"event_timestamp,omitempty"` + // Complete reference to the featureSet this featureRow belongs to, in the form of + // featureSetName:version. This value will be used by the feast ingestion job to filter + // rows, and write the values to the correct tables. + FeatureSet string `protobuf:"bytes,6,opt,name=feature_set,json=featureSet,proto3" json:"feature_set,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeatureRow) Reset() { *m = FeatureRow{} } +func (m *FeatureRow) String() string { return proto.CompactTextString(m) } +func (*FeatureRow) ProtoMessage() {} +func (*FeatureRow) Descriptor() ([]byte, []int) { + return fileDescriptor_fbbea9c89787d1c7, []int{0} +} + +func (m *FeatureRow) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeatureRow.Unmarshal(m, b) +} +func (m *FeatureRow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeatureRow.Marshal(b, m, deterministic) +} +func (m *FeatureRow) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeatureRow.Merge(m, src) +} +func (m *FeatureRow) XXX_Size() int { + return xxx_messageInfo_FeatureRow.Size(m) +} +func (m *FeatureRow) XXX_DiscardUnknown() { + xxx_messageInfo_FeatureRow.DiscardUnknown(m) +} + +var xxx_messageInfo_FeatureRow proto.InternalMessageInfo + +func (m *FeatureRow) GetFields() []*Field { + if m != nil { + return m.Fields + } + return nil +} + +func (m *FeatureRow) GetEventTimestamp() *timestamp.Timestamp { + if m != nil { + return m.EventTimestamp + } + return nil +} + +func (m *FeatureRow) GetFeatureSet() string { + if m != nil { + return m.FeatureSet + } + return "" +} + +func init() { + proto.RegisterType((*FeatureRow)(nil), "feast.types.FeatureRow") +} + +func init() { proto.RegisterFile("feast/types/FeatureRow.proto", fileDescriptor_fbbea9c89787d1c7) } + +var fileDescriptor_fbbea9c89787d1c7 = []byte{ + // 238 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x90, 0xcd, 0x4a, 0xc3, 0x40, + 0x10, 0xc7, 0x89, 0x85, 0x80, 0x1b, 0xb0, 0xb0, 0x17, 0x43, 0x10, 0x1a, 0x3c, 0x05, 0x0f, 0x33, + 0x52, 0xdf, 0xa0, 0x82, 0xe7, 0x12, 0x3d, 0x79, 0x29, 0x89, 0x9d, 0xac, 0xb1, 0x8d, 0x13, 0xba, + 0x13, 0xc5, 0x97, 0xf1, 0x59, 0x65, 0x77, 0xdb, 0x26, 0x1e, 0x77, 0x7e, 0x33, 0xff, 0x8f, 
0x55, + 0x37, 0x0d, 0x55, 0x56, 0x50, 0x7e, 0x7a, 0xb2, 0xf8, 0x44, 0x95, 0x0c, 0x07, 0x2a, 0xf9, 0x1b, + 0xfa, 0x03, 0x0b, 0xeb, 0xc4, 0x53, 0xf0, 0x34, 0x5b, 0x18, 0x66, 0xb3, 0x27, 0xf4, 0xa8, 0x1e, + 0x1a, 0x94, 0xb6, 0x23, 0x2b, 0x55, 0xd7, 0x87, 0xed, 0xec, 0xfa, 0x9f, 0x56, 0x4b, 0xfb, 0x6d, + 0x00, 0xb7, 0xbf, 0x91, 0x52, 0xa3, 0xb6, 0xbe, 0x53, 0x71, 0xe3, 0xa8, 0x4d, 0x2f, 0xf2, 0x59, + 0x91, 0x2c, 0x35, 0x4c, 0x6c, 0xc0, 0x1f, 0x96, 0xc7, 0x0d, 0xfd, 0xa8, 0xe6, 0xf4, 0x45, 0x9f, + 0xb2, 0x39, 0x9b, 0xa5, 0xb3, 0x3c, 0x2a, 0x92, 0x65, 0x06, 0x21, 0x0e, 0x9c, 0xe2, 0xc0, 0xcb, + 0x69, 0xa3, 0xbc, 0xf2, 0x27, 0xe7, 0xb7, 0x5e, 0x28, 0x57, 0xc4, 0xd9, 0x6f, 0x2c, 0x49, 0x1a, + 0xe7, 0x51, 0x71, 0x59, 0xaa, 0xe3, 0xe8, 0x99, 0x64, 0xb5, 0x56, 0xd3, 0xa6, 0xab, 0xf9, 0x18, + 0x76, 0xed, 0xd4, 0x5f, 0xef, 0x4d, 0x2b, 0xef, 0x43, 0x0d, 0x6f, 0xdc, 0xa1, 0xe1, 0x0f, 0xda, + 0x61, 0xa8, 0x6a, 0xb7, 0x3b, 0x34, 0x1c, 0x7e, 0xc4, 0xe2, 0xa4, 0x7e, 0x1d, 0xfb, 0xd9, 0xc3, + 0x5f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x2f, 0x88, 0x1b, 0x19, 0x60, 0x01, 0x00, 0x00, +} diff --git a/protos/generated/go/feast/types/FeatureRowExtended.pb.go b/sdk/go/protos/feast/types/FeatureRowExtended.pb.go similarity index 62% rename from protos/generated/go/feast/types/FeatureRowExtended.pb.go rename to sdk/go/protos/feast/types/FeatureRowExtended.pb.go index ad89f27db23..734c98687a0 100644 --- a/protos/generated/go/feast/types/FeatureRowExtended.pb.go +++ b/sdk/go/protos/feast/types/FeatureRowExtended.pb.go @@ -1,12 +1,14 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: feast/types/FeatureRowExtended.proto -package types // import "github.com/gojek/feast/protos/generated/go/feast/types" +package types -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + timestamp "github.com/golang/protobuf/ptypes/timestamp" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -17,13 +19,13 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type Error struct { Cause string `protobuf:"bytes,1,opt,name=cause,proto3" json:"cause,omitempty"` Transform string `protobuf:"bytes,2,opt,name=transform,proto3" json:"transform,omitempty"` Message string `protobuf:"bytes,3,opt,name=message,proto3" json:"message,omitempty"` - StackTrace string `protobuf:"bytes,4,opt,name=stackTrace,proto3" json:"stackTrace,omitempty"` + StackTrace string `protobuf:"bytes,4,opt,name=stack_trace,json=stackTrace,proto3" json:"stack_trace,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -33,16 +35,17 @@ func (m *Error) Reset() { *m = Error{} } func (m *Error) String() string { return proto.CompactTextString(m) } func (*Error) ProtoMessage() {} func (*Error) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureRowExtended_bfd3c37956d1a040, []int{0} + return fileDescriptor_7823aa2c72575793, []int{0} } + func (m *Error) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Error.Unmarshal(m, b) } func (m *Error) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Error.Marshal(b, m, deterministic) } -func (dst *Error) XXX_Merge(src proto.Message) { - xxx_messageInfo_Error.Merge(dst, src) +func (m *Error) XXX_Merge(src proto.Message) { + xxx_messageInfo_Error.Merge(m, src) } func (m *Error) XXX_Size() int { return xxx_messageInfo_Error.Size(m) @@ -93,16 +96,17 @@ func (m *Attempt) Reset() { *m = Attempt{} } func (m *Attempt) String() string { return proto.CompactTextString(m) } func (*Attempt) ProtoMessage() {} func (*Attempt) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureRowExtended_bfd3c37956d1a040, []int{1} + return fileDescriptor_7823aa2c72575793, []int{1} } + func (m *Attempt) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Attempt.Unmarshal(m, b) } func (m *Attempt) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Attempt.Marshal(b, m, deterministic) } -func (dst *Attempt) XXX_Merge(src proto.Message) { - xxx_messageInfo_Attempt.Merge(dst, src) +func (m *Attempt) XXX_Merge(src proto.Message) { + xxx_messageInfo_Attempt.Merge(m, src) } func (m *Attempt) XXX_Size() int { return xxx_messageInfo_Attempt.Size(m) @@ -129,8 +133,8 @@ func (m *Attempt) GetError() *Error { type FeatureRowExtended struct { Row *FeatureRow `protobuf:"bytes,1,opt,name=row,proto3" json:"row,omitempty"` - LastAttempt *Attempt `protobuf:"bytes,2,opt,name=lastAttempt,proto3" json:"lastAttempt,omitempty"` - FirstSeen *timestamp.Timestamp `protobuf:"bytes,3,opt,name=firstSeen,proto3" json:"firstSeen,omitempty"` + LastAttempt *Attempt `protobuf:"bytes,2,opt,name=last_attempt,json=lastAttempt,proto3" json:"last_attempt,omitempty"` + FirstSeen *timestamp.Timestamp `protobuf:"bytes,3,opt,name=first_seen,json=firstSeen,proto3" json:"first_seen,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -140,16 +144,17 @@ func (m *FeatureRowExtended) Reset() { *m = FeatureRowExtended{} } func (m *FeatureRowExtended) String() string { return proto.CompactTextString(m) } func (*FeatureRowExtended) ProtoMessage() {} func (*FeatureRowExtended) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureRowExtended_bfd3c37956d1a040, []int{2} + return fileDescriptor_7823aa2c72575793, []int{2} } + func (m *FeatureRowExtended) 
XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FeatureRowExtended.Unmarshal(m, b) } func (m *FeatureRowExtended) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_FeatureRowExtended.Marshal(b, m, deterministic) } -func (dst *FeatureRowExtended) XXX_Merge(src proto.Message) { - xxx_messageInfo_FeatureRowExtended.Merge(dst, src) +func (m *FeatureRowExtended) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeatureRowExtended.Merge(m, src) } func (m *FeatureRowExtended) XXX_Size() int { return xxx_messageInfo_FeatureRowExtended.Size(m) @@ -188,31 +193,31 @@ func init() { } func init() { - proto.RegisterFile("feast/types/FeatureRowExtended.proto", fileDescriptor_FeatureRowExtended_bfd3c37956d1a040) -} - -var fileDescriptor_FeatureRowExtended_bfd3c37956d1a040 = []byte{ - // 338 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x91, 0xc1, 0x6b, 0xea, 0x40, - 0x10, 0xc6, 0xf1, 0xf9, 0xf2, 0x7c, 0x4e, 0x6e, 0x8b, 0x60, 0x08, 0xd2, 0x16, 0xe9, 0xc1, 0x5e, - 0x76, 0xc1, 0x82, 0xf4, 0x5a, 0xc1, 0x5e, 0x5b, 0xb6, 0x9e, 0x7a, 0x28, 0xac, 0x71, 0xb2, 0xb5, - 0x9a, 0x6c, 0xd8, 0x9d, 0x60, 0xfb, 0x67, 0xf5, 0x3f, 0x2c, 0xee, 0x6a, 0x8d, 0xd8, 0x5b, 0x66, - 0xbf, 0xdf, 0xcc, 0x7c, 0x99, 0x0f, 0xae, 0x73, 0x54, 0x8e, 0x04, 0x7d, 0x56, 0xe8, 0xc4, 0x03, - 0x2a, 0xaa, 0x2d, 0x4a, 0xb3, 0x9d, 0x7d, 0x10, 0x96, 0x4b, 0x5c, 0xf2, 0xca, 0x1a, 0x32, 0x2c, - 0xf6, 0x14, 0xf7, 0x54, 0x7a, 0xa9, 0x8d, 0xd1, 0x1b, 0x14, 0x5e, 0x5a, 0xd4, 0xb9, 0xa0, 0x55, - 0x81, 0x8e, 0x54, 0x51, 0x05, 0x3a, 0x1d, 0xfc, 0x3e, 0x33, 0xa8, 0xc3, 0x1a, 0xa2, 0x99, 0xb5, - 0xc6, 0xb2, 0x1e, 0x44, 0x99, 0xaa, 0x1d, 0x26, 0xad, 0xab, 0xd6, 0xa8, 0x2b, 0x43, 0xc1, 0x06, - 0xd0, 0x25, 0xab, 0x4a, 0x97, 0x1b, 0x5b, 0x24, 0x7f, 0xbc, 0x72, 0x7c, 0x60, 0x09, 0x74, 0x0a, - 0x74, 0x4e, 0x69, 0x4c, 0xda, 0x5e, 0x3b, 0x94, 0xec, 0x02, 0xc0, 0x91, 0xca, 0xd6, 0x73, 0xab, - 0x32, 0x4c, 0xfe, 0x7a, 0xb1, 0xf1, 0x32, 0x7c, 0x84, 0xce, 0x3d, 0x11, 0x16, 0x15, 0xb1, 0x14, - 0xfe, 0xab, 0xf0, 0xe9, 0xfc, 0xee, 0x48, 0xfe, 0xd4, 0x6c, 0x04, 0x11, 0xee, 0xdc, 0xf9, 0xd5, - 0xf1, 0x98, 0xf1, 0xc6, 0x9f, 0x73, 0xef, 0x5b, 0x06, 0x60, 0xf8, 0xd5, 0x02, 0x76, 0x7e, 0x30, - 0x76, 0x03, 0x6d, 0x6b, 0xb6, 0x7e, 0x6e, 0x3c, 0xee, 0x9f, 0xb4, 0x1f, 0x69, 0xb9, 0x63, 0xd8, - 0x04, 0xe2, 0x8d, 0x72, 0xb4, 0xb7, 0xb5, 0xdf, 0xd8, 0x3b, 0x69, 0xd9, 0x6b, 0xb2, 0x09, 0xb2, - 0x3b, 0xe8, 0xe6, 0x2b, 0xeb, 0xe8, 0x19, 0xb1, 0xf4, 0x67, 0x88, 0xc7, 0x29, 0x0f, 0xa1, 0xf0, - 0x43, 0x28, 0x7c, 0x7e, 0x08, 0x45, 0x1e, 0xe1, 0xe9, 0x2b, 0x34, 0x93, 0x9c, 0xf6, 0xcf, 0xfd, - 0x3f, 0xed, 0xfa, 0x5f, 0x26, 0x7a, 0x45, 0x6f, 0xf5, 0x82, 0x67, 0xa6, 0x10, 0xda, 0xbc, 0xe3, - 0x5a, 0x84, 0x48, 0xfd, 0x74, 0x27, 0x34, 0x96, 0x68, 0x15, 0xe1, 0x52, 0x68, 0x23, 0x1a, 0x61, - 0x2f, 0xfe, 0x79, 0xe0, 0xf6, 0x3b, 0x00, 0x00, 0xff, 0xff, 0x99, 0x23, 0x9a, 0xf3, 0x56, 0x02, - 0x00, 0x00, + proto.RegisterFile("feast/types/FeatureRowExtended.proto", fileDescriptor_7823aa2c72575793) +} + +var fileDescriptor_7823aa2c72575793 = []byte{ + // 345 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x91, 0xcf, 0x4e, 0xc2, 0x40, + 0x10, 0xc6, 0x83, 0x58, 0x91, 0xa9, 0xa7, 0x0d, 0x09, 0x4d, 0x43, 0x82, 0x21, 0x1e, 0xf0, 0xb2, + 0x6b, 0xf0, 0x60, 0x3c, 0x4a, 0x82, 0x57, 0x4d, 0xe5, 0x60, 0xbc, 0x90, 0xa5, 0x4c, 0x2b, 0x42, + 0xbb, 0xcd, 0xee, 0x54, 0xf4, 0xb9, 0x7c, 0x41, 0xd3, 0x5d, 0x2a, 0x10, 0xbc, 0xed, 0xcc, 0xf7, + 0x9b, 0x9d, 0x3f, 0x1f, 0x5c, 
0x25, 0x28, 0x0d, 0x09, 0xfa, 0x2e, 0xd0, 0x88, 0x47, 0x94, 0x54, + 0x6a, 0x8c, 0xd4, 0x66, 0xf2, 0x45, 0x98, 0x2f, 0x70, 0xc1, 0x0b, 0xad, 0x48, 0x31, 0xdf, 0x52, + 0xdc, 0x52, 0x61, 0x3f, 0x55, 0x2a, 0x5d, 0xa3, 0xb0, 0xd2, 0xbc, 0x4c, 0x04, 0x2d, 0x33, 0x34, + 0x24, 0xb3, 0xc2, 0xd1, 0x61, 0xef, 0xff, 0x3f, 0x9d, 0x3a, 0xf8, 0x04, 0x6f, 0xa2, 0xb5, 0xd2, + 0xac, 0x03, 0x5e, 0x2c, 0x4b, 0x83, 0x41, 0xe3, 0xb2, 0x31, 0x6c, 0x47, 0x2e, 0x60, 0x3d, 0x68, + 0x93, 0x96, 0xb9, 0x49, 0x94, 0xce, 0x82, 0x13, 0xab, 0xec, 0x12, 0x2c, 0x80, 0x56, 0x86, 0xc6, + 0xc8, 0x14, 0x83, 0xa6, 0xd5, 0xea, 0x90, 0xf5, 0xc1, 0x37, 0x24, 0xe3, 0xd5, 0x8c, 0xb4, 0x8c, + 0x31, 0x38, 0xb5, 0x2a, 0xd8, 0xd4, 0xb4, 0xca, 0x0c, 0x9e, 0xa0, 0xf5, 0x40, 0x84, 0x59, 0x41, + 0x2c, 0x84, 0x73, 0xe9, 0x9e, 0xc6, 0x36, 0xf7, 0xa2, 0xbf, 0x98, 0x0d, 0xc1, 0xc3, 0x6a, 0x3c, + 0xdb, 0xdb, 0x1f, 0x31, 0xbe, 0xb7, 0x3a, 0xb7, 0x83, 0x47, 0x0e, 0x18, 0xfc, 0x34, 0x80, 0x1d, + 0x5f, 0x8c, 0x5d, 0x43, 0x53, 0xab, 0x8d, 0xfd, 0xd7, 0x1f, 0x75, 0x0f, 0xca, 0x77, 0x74, 0x54, + 0x31, 0xec, 0x0e, 0x2e, 0xd6, 0xd2, 0xd0, 0x6c, 0xdb, 0x7c, 0xdb, 0xb2, 0x73, 0x50, 0xb3, 0x9d, + 0x39, 0xf2, 0x2b, 0xb2, 0x5e, 0xe0, 0x1e, 0x20, 0x59, 0x6a, 0x43, 0x33, 0x83, 0x98, 0xdb, 0x4b, + 0xf8, 0xa3, 0x90, 0x3b, 0x5f, 0x78, 0xed, 0x0b, 0x9f, 0xd6, 0xbe, 0x44, 0x6d, 0x4b, 0xbf, 0x20, + 0xe6, 0xe3, 0x57, 0xd8, 0x37, 0x73, 0xdc, 0x3d, 0xde, 0xe0, 0xb9, 0xaa, 0x7f, 0xbb, 0x49, 0x97, + 0xf4, 0x5e, 0xce, 0x79, 0xac, 0x32, 0x91, 0xaa, 0x0f, 0x5c, 0x09, 0xe7, 0xaa, 0x59, 0xac, 0x44, + 0xaa, 0x9c, 0xf9, 0x46, 0xec, 0x39, 0x3d, 0x3f, 0xb3, 0xb9, 0xdb, 0xdf, 0x00, 0x00, 0x00, 0xff, + 0xff, 0x9d, 0x45, 0x64, 0x53, 0x53, 0x02, 0x00, 0x00, } diff --git a/sdk/go/protos/feast/types/Field.pb.go b/sdk/go/protos/feast/types/Field.pb.go new file mode 100644 index 00000000000..345b5259997 --- /dev/null +++ b/sdk/go/protos/feast/types/Field.pb.go @@ -0,0 +1,89 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: feast/types/Field.proto + +package types + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type Field struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Value *Value `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Field) Reset() { *m = Field{} } +func (m *Field) String() string { return proto.CompactTextString(m) } +func (*Field) ProtoMessage() {} +func (*Field) Descriptor() ([]byte, []int) { + return fileDescriptor_8c568a78dfaa9ca9, []int{0} +} + +func (m *Field) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Field.Unmarshal(m, b) +} +func (m *Field) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Field.Marshal(b, m, deterministic) +} +func (m *Field) XXX_Merge(src proto.Message) { + xxx_messageInfo_Field.Merge(m, src) +} +func (m *Field) XXX_Size() int { + return xxx_messageInfo_Field.Size(m) +} +func (m *Field) XXX_DiscardUnknown() { + xxx_messageInfo_Field.DiscardUnknown(m) +} + +var xxx_messageInfo_Field proto.InternalMessageInfo + +func (m *Field) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Field) GetValue() *Value { + if m != nil { + return m.Value + } + return nil +} + +func init() { + proto.RegisterType((*Field)(nil), "feast.types.Field") +} + +func init() { proto.RegisterFile("feast/types/Field.proto", fileDescriptor_8c568a78dfaa9ca9) } + +var fileDescriptor_8c568a78dfaa9ca9 = []byte{ + // 165 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x4f, 0x4b, 0x4d, 0x2c, + 0x2e, 0xd1, 0x2f, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x77, 0xcb, 0x4c, 0xcd, 0x49, 0xd1, 0x2b, 0x28, + 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x06, 0x4b, 0xe8, 0x81, 0x25, 0xa4, 0x50, 0x54, 0x85, 0x25, 0xe6, + 0x94, 0xa6, 0x42, 0x54, 0x29, 0xb9, 0x72, 0xb1, 0x82, 0x35, 0x09, 0x09, 0x71, 0xb1, 0xe4, 0x25, + 0xe6, 0xa6, 0x4a, 0x30, 0x2a, 0x30, 0x6a, 0x70, 0x06, 0x81, 0xd9, 0x42, 0x1a, 0x5c, 0xac, 0x65, + 0x20, 0xb5, 0x12, 0x4c, 0x0a, 0x8c, 0x1a, 0xdc, 0x46, 0x42, 0x7a, 0x48, 0x46, 0xea, 0x81, 0x4d, + 0x09, 0x82, 0x28, 0x70, 0xf2, 0xe6, 0x42, 0xb6, 0xce, 0x89, 0x0b, 0x6c, 0x66, 0x00, 0xc8, 0x86, + 0x28, 0x83, 0xf4, 0xcc, 0x92, 0x8c, 0xd2, 0x24, 0xbd, 0xe4, 0xfc, 0x5c, 0xfd, 0xf4, 0xfc, 0xac, + 0xd4, 0x6c, 0x7d, 0x88, 0x5b, 0x8a, 0x53, 0xb2, 0xf5, 0xd3, 0xf3, 0xf5, 0xc1, 0xce, 0x28, 0xd6, + 0x47, 0x72, 0x5f, 0x12, 0x1b, 0x58, 0xcc, 0x18, 0x10, 0x00, 0x00, 0xff, 0xff, 0xef, 0xe8, 0xff, + 0x05, 0xdb, 0x00, 0x00, 0x00, +} diff --git a/sdk/go/protos/feast/types/Value.pb.go b/sdk/go/protos/feast/types/Value.pb.go new file mode 100644 index 00000000000..3f9808b994f --- /dev/null +++ b/sdk/go/protos/feast/types/Value.pb.go @@ -0,0 +1,709 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: feast/types/Value.proto + +package types + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type ValueType_Enum int32 + +const ( + ValueType_INVALID ValueType_Enum = 0 + ValueType_BYTES ValueType_Enum = 1 + ValueType_STRING ValueType_Enum = 2 + ValueType_INT32 ValueType_Enum = 3 + ValueType_INT64 ValueType_Enum = 4 + ValueType_DOUBLE ValueType_Enum = 5 + ValueType_FLOAT ValueType_Enum = 6 + ValueType_BOOL ValueType_Enum = 7 + ValueType_BYTES_LIST ValueType_Enum = 11 + ValueType_STRING_LIST ValueType_Enum = 12 + ValueType_INT32_LIST ValueType_Enum = 13 + ValueType_INT64_LIST ValueType_Enum = 14 + ValueType_DOUBLE_LIST ValueType_Enum = 15 + ValueType_FLOAT_LIST ValueType_Enum = 16 + ValueType_BOOL_LIST ValueType_Enum = 17 +) + +var ValueType_Enum_name = map[int32]string{ + 0: "INVALID", + 1: "BYTES", + 2: "STRING", + 3: "INT32", + 4: "INT64", + 5: "DOUBLE", + 6: "FLOAT", + 7: "BOOL", + 11: "BYTES_LIST", + 12: "STRING_LIST", + 13: "INT32_LIST", + 14: "INT64_LIST", + 15: "DOUBLE_LIST", + 16: "FLOAT_LIST", + 17: "BOOL_LIST", +} + +var ValueType_Enum_value = map[string]int32{ + "INVALID": 0, + "BYTES": 1, + "STRING": 2, + "INT32": 3, + "INT64": 4, + "DOUBLE": 5, + "FLOAT": 6, + "BOOL": 7, + "BYTES_LIST": 11, + "STRING_LIST": 12, + "INT32_LIST": 13, + "INT64_LIST": 14, + "DOUBLE_LIST": 15, + "FLOAT_LIST": 16, + "BOOL_LIST": 17, +} + +func (x ValueType_Enum) String() string { + return proto.EnumName(ValueType_Enum_name, int32(x)) +} + +func (ValueType_Enum) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_47c504407d284ecc, []int{0, 0} +} + +type ValueType struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ValueType) Reset() { *m = ValueType{} } +func (m *ValueType) String() string { return proto.CompactTextString(m) } +func (*ValueType) ProtoMessage() {} +func (*ValueType) Descriptor() ([]byte, []int) { + return fileDescriptor_47c504407d284ecc, []int{0} +} + +func (m *ValueType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ValueType.Unmarshal(m, b) +} +func (m *ValueType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ValueType.Marshal(b, m, deterministic) +} +func (m *ValueType) XXX_Merge(src proto.Message) { + xxx_messageInfo_ValueType.Merge(m, src) +} +func (m *ValueType) XXX_Size() int { + return xxx_messageInfo_ValueType.Size(m) +} +func (m *ValueType) XXX_DiscardUnknown() { + xxx_messageInfo_ValueType.DiscardUnknown(m) +} + +var xxx_messageInfo_ValueType proto.InternalMessageInfo + +type Value struct { + // ValueType is referenced by the metadata types, FeatureInfo and EntityInfo. + // The enum values do not have to match the oneof val field ids, but they should. 
+ // + // Types that are valid to be assigned to Val: + // *Value_BytesVal + // *Value_StringVal + // *Value_Int32Val + // *Value_Int64Val + // *Value_DoubleVal + // *Value_FloatVal + // *Value_BoolVal + // *Value_BytesListVal + // *Value_StringListVal + // *Value_Int32ListVal + // *Value_Int64ListVal + // *Value_DoubleListVal + // *Value_FloatListVal + // *Value_BoolListVal + Val isValue_Val `protobuf_oneof:"val"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Value) Reset() { *m = Value{} } +func (m *Value) String() string { return proto.CompactTextString(m) } +func (*Value) ProtoMessage() {} +func (*Value) Descriptor() ([]byte, []int) { + return fileDescriptor_47c504407d284ecc, []int{1} +} + +func (m *Value) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Value.Unmarshal(m, b) +} +func (m *Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Value.Marshal(b, m, deterministic) +} +func (m *Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_Value.Merge(m, src) +} +func (m *Value) XXX_Size() int { + return xxx_messageInfo_Value.Size(m) +} +func (m *Value) XXX_DiscardUnknown() { + xxx_messageInfo_Value.DiscardUnknown(m) +} + +var xxx_messageInfo_Value proto.InternalMessageInfo + +type isValue_Val interface { + isValue_Val() +} + +type Value_BytesVal struct { + BytesVal []byte `protobuf:"bytes,1,opt,name=bytes_val,json=bytesVal,proto3,oneof"` +} + +type Value_StringVal struct { + StringVal string `protobuf:"bytes,2,opt,name=string_val,json=stringVal,proto3,oneof"` +} + +type Value_Int32Val struct { + Int32Val int32 `protobuf:"varint,3,opt,name=int32_val,json=int32Val,proto3,oneof"` +} + +type Value_Int64Val struct { + Int64Val int64 `protobuf:"varint,4,opt,name=int64_val,json=int64Val,proto3,oneof"` +} + +type Value_DoubleVal struct { + DoubleVal float64 `protobuf:"fixed64,5,opt,name=double_val,json=doubleVal,proto3,oneof"` +} + +type Value_FloatVal struct { + FloatVal float32 `protobuf:"fixed32,6,opt,name=float_val,json=floatVal,proto3,oneof"` +} + +type Value_BoolVal struct { + BoolVal bool `protobuf:"varint,7,opt,name=bool_val,json=boolVal,proto3,oneof"` +} + +type Value_BytesListVal struct { + BytesListVal *BytesList `protobuf:"bytes,11,opt,name=bytes_list_val,json=bytesListVal,proto3,oneof"` +} + +type Value_StringListVal struct { + StringListVal *StringList `protobuf:"bytes,12,opt,name=string_list_val,json=stringListVal,proto3,oneof"` +} + +type Value_Int32ListVal struct { + Int32ListVal *Int32List `protobuf:"bytes,13,opt,name=int32_list_val,json=int32ListVal,proto3,oneof"` +} + +type Value_Int64ListVal struct { + Int64ListVal *Int64List `protobuf:"bytes,14,opt,name=int64_list_val,json=int64ListVal,proto3,oneof"` +} + +type Value_DoubleListVal struct { + DoubleListVal *DoubleList `protobuf:"bytes,15,opt,name=double_list_val,json=doubleListVal,proto3,oneof"` +} + +type Value_FloatListVal struct { + FloatListVal *FloatList `protobuf:"bytes,16,opt,name=float_list_val,json=floatListVal,proto3,oneof"` +} + +type Value_BoolListVal struct { + BoolListVal *BoolList `protobuf:"bytes,17,opt,name=bool_list_val,json=boolListVal,proto3,oneof"` +} + +func (*Value_BytesVal) isValue_Val() {} + +func (*Value_StringVal) isValue_Val() {} + +func (*Value_Int32Val) isValue_Val() {} + +func (*Value_Int64Val) isValue_Val() {} + +func (*Value_DoubleVal) isValue_Val() {} + +func (*Value_FloatVal) isValue_Val() {} + +func (*Value_BoolVal) isValue_Val() {} + +func 
(*Value_BytesListVal) isValue_Val() {} + +func (*Value_StringListVal) isValue_Val() {} + +func (*Value_Int32ListVal) isValue_Val() {} + +func (*Value_Int64ListVal) isValue_Val() {} + +func (*Value_DoubleListVal) isValue_Val() {} + +func (*Value_FloatListVal) isValue_Val() {} + +func (*Value_BoolListVal) isValue_Val() {} + +func (m *Value) GetVal() isValue_Val { + if m != nil { + return m.Val + } + return nil +} + +func (m *Value) GetBytesVal() []byte { + if x, ok := m.GetVal().(*Value_BytesVal); ok { + return x.BytesVal + } + return nil +} + +func (m *Value) GetStringVal() string { + if x, ok := m.GetVal().(*Value_StringVal); ok { + return x.StringVal + } + return "" +} + +func (m *Value) GetInt32Val() int32 { + if x, ok := m.GetVal().(*Value_Int32Val); ok { + return x.Int32Val + } + return 0 +} + +func (m *Value) GetInt64Val() int64 { + if x, ok := m.GetVal().(*Value_Int64Val); ok { + return x.Int64Val + } + return 0 +} + +func (m *Value) GetDoubleVal() float64 { + if x, ok := m.GetVal().(*Value_DoubleVal); ok { + return x.DoubleVal + } + return 0 +} + +func (m *Value) GetFloatVal() float32 { + if x, ok := m.GetVal().(*Value_FloatVal); ok { + return x.FloatVal + } + return 0 +} + +func (m *Value) GetBoolVal() bool { + if x, ok := m.GetVal().(*Value_BoolVal); ok { + return x.BoolVal + } + return false +} + +func (m *Value) GetBytesListVal() *BytesList { + if x, ok := m.GetVal().(*Value_BytesListVal); ok { + return x.BytesListVal + } + return nil +} + +func (m *Value) GetStringListVal() *StringList { + if x, ok := m.GetVal().(*Value_StringListVal); ok { + return x.StringListVal + } + return nil +} + +func (m *Value) GetInt32ListVal() *Int32List { + if x, ok := m.GetVal().(*Value_Int32ListVal); ok { + return x.Int32ListVal + } + return nil +} + +func (m *Value) GetInt64ListVal() *Int64List { + if x, ok := m.GetVal().(*Value_Int64ListVal); ok { + return x.Int64ListVal + } + return nil +} + +func (m *Value) GetDoubleListVal() *DoubleList { + if x, ok := m.GetVal().(*Value_DoubleListVal); ok { + return x.DoubleListVal + } + return nil +} + +func (m *Value) GetFloatListVal() *FloatList { + if x, ok := m.GetVal().(*Value_FloatListVal); ok { + return x.FloatListVal + } + return nil +} + +func (m *Value) GetBoolListVal() *BoolList { + if x, ok := m.GetVal().(*Value_BoolListVal); ok { + return x.BoolListVal + } + return nil +} + +// XXX_OneofWrappers is for the internal use of the proto package. 
+func (*Value) XXX_OneofWrappers() []interface{} { + return []interface{}{ + (*Value_BytesVal)(nil), + (*Value_StringVal)(nil), + (*Value_Int32Val)(nil), + (*Value_Int64Val)(nil), + (*Value_DoubleVal)(nil), + (*Value_FloatVal)(nil), + (*Value_BoolVal)(nil), + (*Value_BytesListVal)(nil), + (*Value_StringListVal)(nil), + (*Value_Int32ListVal)(nil), + (*Value_Int64ListVal)(nil), + (*Value_DoubleListVal)(nil), + (*Value_FloatListVal)(nil), + (*Value_BoolListVal)(nil), + } +} + +type BytesList struct { + Val [][]byte `protobuf:"bytes,1,rep,name=val,proto3" json:"val,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BytesList) Reset() { *m = BytesList{} } +func (m *BytesList) String() string { return proto.CompactTextString(m) } +func (*BytesList) ProtoMessage() {} +func (*BytesList) Descriptor() ([]byte, []int) { + return fileDescriptor_47c504407d284ecc, []int{2} +} + +func (m *BytesList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BytesList.Unmarshal(m, b) +} +func (m *BytesList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BytesList.Marshal(b, m, deterministic) +} +func (m *BytesList) XXX_Merge(src proto.Message) { + xxx_messageInfo_BytesList.Merge(m, src) +} +func (m *BytesList) XXX_Size() int { + return xxx_messageInfo_BytesList.Size(m) +} +func (m *BytesList) XXX_DiscardUnknown() { + xxx_messageInfo_BytesList.DiscardUnknown(m) +} + +var xxx_messageInfo_BytesList proto.InternalMessageInfo + +func (m *BytesList) GetVal() [][]byte { + if m != nil { + return m.Val + } + return nil +} + +type StringList struct { + Val []string `protobuf:"bytes,1,rep,name=val,proto3" json:"val,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StringList) Reset() { *m = StringList{} } +func (m *StringList) String() string { return proto.CompactTextString(m) } +func (*StringList) ProtoMessage() {} +func (*StringList) Descriptor() ([]byte, []int) { + return fileDescriptor_47c504407d284ecc, []int{3} +} + +func (m *StringList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StringList.Unmarshal(m, b) +} +func (m *StringList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StringList.Marshal(b, m, deterministic) +} +func (m *StringList) XXX_Merge(src proto.Message) { + xxx_messageInfo_StringList.Merge(m, src) +} +func (m *StringList) XXX_Size() int { + return xxx_messageInfo_StringList.Size(m) +} +func (m *StringList) XXX_DiscardUnknown() { + xxx_messageInfo_StringList.DiscardUnknown(m) +} + +var xxx_messageInfo_StringList proto.InternalMessageInfo + +func (m *StringList) GetVal() []string { + if m != nil { + return m.Val + } + return nil +} + +type Int32List struct { + Val []int32 `protobuf:"varint,1,rep,packed,name=val,proto3" json:"val,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Int32List) Reset() { *m = Int32List{} } +func (m *Int32List) String() string { return proto.CompactTextString(m) } +func (*Int32List) ProtoMessage() {} +func (*Int32List) Descriptor() ([]byte, []int) { + return fileDescriptor_47c504407d284ecc, []int{4} +} + +func (m *Int32List) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Int32List.Unmarshal(m, b) +} +func (m *Int32List) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return 
xxx_messageInfo_Int32List.Marshal(b, m, deterministic) +} +func (m *Int32List) XXX_Merge(src proto.Message) { + xxx_messageInfo_Int32List.Merge(m, src) +} +func (m *Int32List) XXX_Size() int { + return xxx_messageInfo_Int32List.Size(m) +} +func (m *Int32List) XXX_DiscardUnknown() { + xxx_messageInfo_Int32List.DiscardUnknown(m) +} + +var xxx_messageInfo_Int32List proto.InternalMessageInfo + +func (m *Int32List) GetVal() []int32 { + if m != nil { + return m.Val + } + return nil +} + +type Int64List struct { + Val []int64 `protobuf:"varint,1,rep,packed,name=val,proto3" json:"val,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Int64List) Reset() { *m = Int64List{} } +func (m *Int64List) String() string { return proto.CompactTextString(m) } +func (*Int64List) ProtoMessage() {} +func (*Int64List) Descriptor() ([]byte, []int) { + return fileDescriptor_47c504407d284ecc, []int{5} +} + +func (m *Int64List) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Int64List.Unmarshal(m, b) +} +func (m *Int64List) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Int64List.Marshal(b, m, deterministic) +} +func (m *Int64List) XXX_Merge(src proto.Message) { + xxx_messageInfo_Int64List.Merge(m, src) +} +func (m *Int64List) XXX_Size() int { + return xxx_messageInfo_Int64List.Size(m) +} +func (m *Int64List) XXX_DiscardUnknown() { + xxx_messageInfo_Int64List.DiscardUnknown(m) +} + +var xxx_messageInfo_Int64List proto.InternalMessageInfo + +func (m *Int64List) GetVal() []int64 { + if m != nil { + return m.Val + } + return nil +} + +type DoubleList struct { + Val []float64 `protobuf:"fixed64,1,rep,packed,name=val,proto3" json:"val,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DoubleList) Reset() { *m = DoubleList{} } +func (m *DoubleList) String() string { return proto.CompactTextString(m) } +func (*DoubleList) ProtoMessage() {} +func (*DoubleList) Descriptor() ([]byte, []int) { + return fileDescriptor_47c504407d284ecc, []int{6} +} + +func (m *DoubleList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DoubleList.Unmarshal(m, b) +} +func (m *DoubleList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DoubleList.Marshal(b, m, deterministic) +} +func (m *DoubleList) XXX_Merge(src proto.Message) { + xxx_messageInfo_DoubleList.Merge(m, src) +} +func (m *DoubleList) XXX_Size() int { + return xxx_messageInfo_DoubleList.Size(m) +} +func (m *DoubleList) XXX_DiscardUnknown() { + xxx_messageInfo_DoubleList.DiscardUnknown(m) +} + +var xxx_messageInfo_DoubleList proto.InternalMessageInfo + +func (m *DoubleList) GetVal() []float64 { + if m != nil { + return m.Val + } + return nil +} + +type FloatList struct { + Val []float32 `protobuf:"fixed32,1,rep,packed,name=val,proto3" json:"val,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FloatList) Reset() { *m = FloatList{} } +func (m *FloatList) String() string { return proto.CompactTextString(m) } +func (*FloatList) ProtoMessage() {} +func (*FloatList) Descriptor() ([]byte, []int) { + return fileDescriptor_47c504407d284ecc, []int{7} +} + +func (m *FloatList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FloatList.Unmarshal(m, b) +} +func (m *FloatList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + 
return xxx_messageInfo_FloatList.Marshal(b, m, deterministic) +} +func (m *FloatList) XXX_Merge(src proto.Message) { + xxx_messageInfo_FloatList.Merge(m, src) +} +func (m *FloatList) XXX_Size() int { + return xxx_messageInfo_FloatList.Size(m) +} +func (m *FloatList) XXX_DiscardUnknown() { + xxx_messageInfo_FloatList.DiscardUnknown(m) +} + +var xxx_messageInfo_FloatList proto.InternalMessageInfo + +func (m *FloatList) GetVal() []float32 { + if m != nil { + return m.Val + } + return nil +} + +type BoolList struct { + Val []bool `protobuf:"varint,1,rep,packed,name=val,proto3" json:"val,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BoolList) Reset() { *m = BoolList{} } +func (m *BoolList) String() string { return proto.CompactTextString(m) } +func (*BoolList) ProtoMessage() {} +func (*BoolList) Descriptor() ([]byte, []int) { + return fileDescriptor_47c504407d284ecc, []int{8} +} + +func (m *BoolList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BoolList.Unmarshal(m, b) +} +func (m *BoolList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BoolList.Marshal(b, m, deterministic) +} +func (m *BoolList) XXX_Merge(src proto.Message) { + xxx_messageInfo_BoolList.Merge(m, src) +} +func (m *BoolList) XXX_Size() int { + return xxx_messageInfo_BoolList.Size(m) +} +func (m *BoolList) XXX_DiscardUnknown() { + xxx_messageInfo_BoolList.DiscardUnknown(m) +} + +var xxx_messageInfo_BoolList proto.InternalMessageInfo + +func (m *BoolList) GetVal() []bool { + if m != nil { + return m.Val + } + return nil +} + +func init() { + proto.RegisterEnum("feast.types.ValueType_Enum", ValueType_Enum_name, ValueType_Enum_value) + proto.RegisterType((*ValueType)(nil), "feast.types.ValueType") + proto.RegisterType((*Value)(nil), "feast.types.Value") + proto.RegisterType((*BytesList)(nil), "feast.types.BytesList") + proto.RegisterType((*StringList)(nil), "feast.types.StringList") + proto.RegisterType((*Int32List)(nil), "feast.types.Int32List") + proto.RegisterType((*Int64List)(nil), "feast.types.Int64List") + proto.RegisterType((*DoubleList)(nil), "feast.types.DoubleList") + proto.RegisterType((*FloatList)(nil), "feast.types.FloatList") + proto.RegisterType((*BoolList)(nil), "feast.types.BoolList") +} + +func init() { proto.RegisterFile("feast/types/Value.proto", fileDescriptor_47c504407d284ecc) } + +var fileDescriptor_47c504407d284ecc = []byte{ + // 600 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x94, 0xcf, 0x6e, 0x9b, 0x40, + 0x10, 0xc6, 0xbd, 0xc6, 0xd8, 0x30, 0xf8, 0xcf, 0x06, 0xa9, 0x4d, 0xa4, 0x36, 0x2d, 0xf2, 0x89, + 0x93, 0xa9, 0x12, 0xc4, 0xa5, 0x52, 0xa5, 0xa0, 0x24, 0x35, 0x2a, 0x8a, 0x2b, 0x4c, 0x2d, 0xb5, + 0x97, 0x08, 0x1a, 0xe2, 0xd2, 0x90, 0x10, 0x05, 0x1c, 0xc9, 0xef, 0xd4, 0xa7, 0xe9, 0x13, 0xf4, + 0x51, 0xaa, 0x9d, 0x5d, 0xd6, 0x44, 0xf2, 0xcd, 0xf3, 0xfd, 0xe6, 0xfb, 0xd8, 0xd9, 0x91, 0x17, + 0x0e, 0x6f, 0xb3, 0xa4, 0xaa, 0x9d, 0x7a, 0xfb, 0x98, 0x55, 0xce, 0x2a, 0x29, 0x36, 0xd9, 0xec, + 0xf1, 0xa9, 0xac, 0x4b, 0xd3, 0x40, 0x30, 0x43, 0x30, 0xfd, 0x47, 0x40, 0x47, 0x18, 0x6f, 0x1f, + 0xb3, 0xe9, 0x5f, 0x02, 0xbd, 0x8b, 0x87, 0xcd, 0xbd, 0x69, 0xc0, 0x20, 0xb8, 0x5a, 0x9d, 0x85, + 0xc1, 0x39, 0xed, 0x98, 0x3a, 0xa8, 0xfe, 0xf7, 0xf8, 0x62, 0x49, 0x89, 0x09, 0xd0, 0x5f, 0xc6, + 0x51, 0x70, 0xf5, 0x99, 0x76, 0x99, 0x1c, 0x5c, 0xc5, 0xa7, 0x27, 0x54, 0x11, 0x3f, 0x3d, 0x97, + 0xf6, 0x58, 0xc7, 0xf9, 0xe2, 
0x9b, 0x1f, 0x5e, 0x50, 0x95, 0xc9, 0x97, 0xe1, 0xe2, 0x2c, 0xa6, + 0x7d, 0x53, 0x83, 0x9e, 0xbf, 0x58, 0x84, 0x74, 0x60, 0x8e, 0x01, 0x30, 0xed, 0x3a, 0x0c, 0x96, + 0x31, 0x35, 0xcc, 0x09, 0x18, 0x3c, 0x92, 0x0b, 0x43, 0xd6, 0x80, 0xb9, 0xbc, 0x1e, 0x89, 0xda, + 0x73, 0x79, 0x3d, 0x66, 0x06, 0xfe, 0x05, 0x2e, 0x4c, 0x58, 0x03, 0x7e, 0x86, 0xd7, 0xd4, 0x1c, + 0x81, 0xce, 0xbe, 0xc5, 0xcb, 0x83, 0xe9, 0x1f, 0x15, 0x54, 0x1c, 0xd1, 0x3c, 0x06, 0x3d, 0xdd, + 0xd6, 0x59, 0x75, 0xfd, 0x9c, 0x14, 0x47, 0xc4, 0x22, 0xf6, 0x70, 0xde, 0x89, 0x34, 0x94, 0x56, + 0x49, 0x61, 0xbe, 0x07, 0xa8, 0xea, 0xa7, 0xfc, 0x61, 0x8d, 0xbc, 0x6b, 0x11, 0x5b, 0x9f, 0x77, + 0x22, 0x9d, 0x6b, 0xac, 0xe1, 0x18, 0xf4, 0xfc, 0xa1, 0x3e, 0x3d, 0x41, 0xae, 0x58, 0xc4, 0x56, + 0x99, 0x1f, 0xa5, 0x1d, 0xf6, 0x5c, 0xc4, 0x3d, 0x8b, 0xd8, 0x8a, 0xc0, 0x9e, 0x2b, 0xe2, 0x6f, + 0xca, 0x4d, 0x5a, 0x64, 0xc8, 0x55, 0x8b, 0xd8, 0x84, 0xc5, 0x73, 0x4d, 0xf8, 0x6f, 0x8b, 0x32, + 0xa9, 0x91, 0xf7, 0x2d, 0x62, 0x77, 0x99, 0x1f, 0x25, 0x86, 0xdf, 0x80, 0x96, 0x96, 0x65, 0x81, + 0x74, 0x60, 0x11, 0x5b, 0x9b, 0x77, 0xa2, 0x01, 0x53, 0x18, 0xfc, 0x04, 0x63, 0x3e, 0x5a, 0x91, + 0x57, 0x3c, 0xc0, 0xb0, 0x88, 0x6d, 0x9c, 0xbc, 0x9e, 0xb5, 0xb6, 0x3d, 0xf3, 0x59, 0x4b, 0x98, + 0x57, 0xf5, 0xbc, 0x13, 0x0d, 0xd3, 0xa6, 0x60, 0xfe, 0x33, 0x98, 0x88, 0xd9, 0x65, 0xc0, 0x10, + 0x03, 0x0e, 0x5f, 0x04, 0x2c, 0xb1, 0x47, 0x24, 0x8c, 0x2a, 0x59, 0x89, 0x23, 0xf0, 0xdb, 0x91, + 0x09, 0xa3, 0x3d, 0x47, 0x08, 0x58, 0x4b, 0x73, 0x84, 0xbc, 0x29, 0x76, 0x7e, 0xcf, 0xdd, 0xf9, + 0xc7, 0xfb, 0xfd, 0x9e, 0xdb, 0xf2, 0xf3, 0x42, 0x8c, 0x20, 0xee, 0x57, 0x06, 0x4c, 0xf6, 0x8c, + 0x70, 0x8e, 0x3d, 0xcd, 0x08, 0x37, 0xb2, 0x12, 0x47, 0xe0, 0x1b, 0x90, 0x09, 0x74, 0xcf, 0x11, + 0x2e, 0x59, 0x4b, 0x73, 0x84, 0xdb, 0xa6, 0x60, 0xfe, 0x8f, 0x30, 0xc2, 0x15, 0x49, 0xfb, 0x01, + 0xda, 0x5f, 0xbd, 0x5c, 0x42, 0x59, 0x16, 0xc2, 0x6d, 0xa4, 0xe2, 0xf7, 0x2a, 0x29, 0x7c, 0x15, + 0x94, 0xe7, 0xa4, 0x98, 0x1e, 0x83, 0x2e, 0xd7, 0x64, 0x52, 0xd4, 0x8e, 0x88, 0xa5, 0xd8, 0xc3, + 0x08, 0xf1, 0x3b, 0x80, 0xdd, 0x12, 0xda, 0x5c, 0x8f, 0x1a, 0xbb, 0xbc, 0xe2, 0x36, 0x56, 0xdb, + 0x98, 0x5f, 0x5a, 0x1b, 0x2b, 0x32, 0x7d, 0x77, 0x3f, 0x6d, 0x4e, 0xa4, 0x5d, 0x4e, 0xdf, 0xc6, + 0x5d, 0x8e, 0xdf, 0x82, 0xd6, 0x4c, 0xd7, 0xa6, 0x1a, 0x52, 0xff, 0x0b, 0xb4, 0x9f, 0x1e, 0x1f, + 0xf0, 0x4f, 0xf9, 0x95, 0xbd, 0x49, 0x3f, 0x3e, 0xac, 0xf3, 0xfa, 0xd7, 0x26, 0x9d, 0xfd, 0x2c, + 0xef, 0x9d, 0x75, 0xf9, 0x3b, 0xbb, 0x73, 0xf8, 0xeb, 0x55, 0xdd, 0xdc, 0x39, 0xeb, 0xd2, 0xc1, + 0x87, 0xab, 0x72, 0x5a, 0x2f, 0x5a, 0xda, 0x47, 0xed, 0xf4, 0x7f, 0x00, 0x00, 0x00, 0xff, 0xff, + 0xa3, 0x3f, 0x67, 0x48, 0xe7, 0x04, 0x00, 0x00, +} diff --git a/sdk/go/request.go b/sdk/go/request.go new file mode 100644 index 00000000000..a8d0a408594 --- /dev/null +++ b/sdk/go/request.go @@ -0,0 +1,73 @@ +package feast + +import ( + "fmt" + "github.com/gojek/feast/sdk/go/protos/feast/serving" + "strconv" + "strings" +) + +var ( + ErrInvalidFeatureName = "Invalid feature name %s provided, feature names must be in the format featureSet:version:featureName." +) + +// OnlineFeaturesRequest wrapper on feast.serving.GetOnlineFeaturesRequest. +type OnlineFeaturesRequest struct { + + // Features is the list of features to obtain from Feast. Each feature must be given by its fully qualified ID, + // in the format featureSet:version:featureName. + Features []string + + // Entities is the list of entity rows to retrieve features on. Each row is a map of entity name to entity value. 
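+	//
+	// A hypothetical sketch of building a request (assumes a feature set named "driver" at
+	// version 1 with an int64 entity "driver_id"; the feature name "trips_today" is made up):
+	//
+	//	req := OnlineFeaturesRequest{
+	//		Features: []string{"driver:1:trips_today"},
+	//		Entities: []Row{{"driver_id": Int64Val(1001)}},
+	//	}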
+ Entities []Row +} + +// Builds the feast-specified request payload from the wrapper. +func (r OnlineFeaturesRequest) buildRequest() (*serving.GetOnlineFeaturesRequest, error) { + featureSets, err := buildFeatureSets(r.Features) + if err != nil { + return nil, err + } + + entityRows := make([]*serving.GetOnlineFeaturesRequest_EntityRow, len(r.Entities)) + + for i := range r.Entities { + entityRows[i] = &serving.GetOnlineFeaturesRequest_EntityRow{ + Fields: r.Entities[i], + } + } + return &serving.GetOnlineFeaturesRequest{ + FeatureSets: featureSets, + EntityRows: entityRows, + }, nil +} + +func buildFeatureSets(features []string) ([]*serving.FeatureSetRequest, error) { + featureSetMap := map[string]*serving.FeatureSetRequest{} + for _, feature := range features { + split := strings.Split(feature, ":") + if len(split) != 3 { + return nil, fmt.Errorf(ErrInvalidFeatureName, feature) + } + featureSetName, featureSetVersion, featureName := split[0], split[1], split[2] + key := featureSetName + ":" + featureSetVersion + if fs, ok := featureSetMap[key]; !ok { + version, err := strconv.Atoi(featureSetVersion) + if err != nil { + return nil, fmt.Errorf(ErrInvalidFeatureName, feature) + } + featureSetMap[key] = &serving.FeatureSetRequest{ + Name: featureSetName, + Version: int32(version), + FeatureNames: []string{featureName}, + } + } else { + fs.FeatureNames = append(fs.GetFeatureNames(), featureName) + } + } + var featureSets []*serving.FeatureSetRequest + for _, featureSet := range featureSetMap { + featureSets = append(featureSets, featureSet) + } + return featureSets, nil +} diff --git a/sdk/go/request_test.go b/sdk/go/request_test.go new file mode 100644 index 00000000000..3da4fd465b8 --- /dev/null +++ b/sdk/go/request_test.go @@ -0,0 +1,106 @@ +package feast + +import ( + "fmt" + "github.com/gojek/feast/sdk/go/protos/feast/serving" + "github.com/gojek/feast/sdk/go/protos/feast/types" + json "github.com/golang/protobuf/jsonpb" + "github.com/google/go-cmp/cmp" + "testing" +) + +func TestGetOnlineFeaturesRequest(t *testing.T) { + tt := []struct { + name string + req OnlineFeaturesRequest + want *serving.GetOnlineFeaturesRequest + wantErr bool + err error + }{ + { + name: "valid", + req: OnlineFeaturesRequest{ + Features: []string{"fs:1:feature1", "fs:1:feature2", "fs:2:feature1"}, + Entities: []Row{ + {"entity1": Int64Val(1), "entity2": StrVal("bob")}, + {"entity1": Int64Val(1), "entity2": StrVal("annie")}, + {"entity1": Int64Val(1), "entity2": StrVal("jane")}, + }, + }, + want: &serving.GetOnlineFeaturesRequest{ + FeatureSets: []*serving.FeatureSetRequest{ + { + Name: "fs", + Version: 1, + FeatureNames: []string{"feature1", "feature2"}, + }, + { + Name: "fs", + Version: 2, + FeatureNames: []string{"feature1"}, + }, + }, + EntityRows: []*serving.GetOnlineFeaturesRequest_EntityRow{ + { + Fields: map[string]*types.Value{ + "entity1": Int64Val(1), + "entity2": StrVal("bob"), + }, + }, + { + Fields: map[string]*types.Value{ + "entity1": Int64Val(1), + "entity2": StrVal("annie"), + }, + }, + { + Fields: map[string]*types.Value{ + "entity1": Int64Val(1), + "entity2": StrVal("jane"), + }, + }, + }, + OmitEntitiesInResponse: false, + }, + wantErr: false, + err: nil, + }, + { + name: "invalid_feature_name/wrong_format", + req: OnlineFeaturesRequest{ + Features: []string{"fs1:feature1"}, + Entities: []Row{}, + }, + wantErr: true, + err: fmt.Errorf(ErrInvalidFeatureName, "fs1:feature1"), + }, + { + name: "invalid_feature_name/invalid_version", + req: OnlineFeaturesRequest{ + Features: 
[]string{"fs:a:feature1"}, + Entities: []Row{}, + }, + wantErr: true, + err: fmt.Errorf(ErrInvalidFeatureName, "fs:a:feature1"), + }, + } + for _, tc := range tt { + t.Run(tc.name, func(t *testing.T) { + got, err := tc.req.buildRequest() + if (err != nil) != tc.wantErr { + t.Errorf("error = %v, wantErr %v", err, tc.wantErr) + return + } + if tc.wantErr && err.Error() != tc.err.Error() { + t.Errorf("error = %v, expected err = %v", err, tc.err) + return + } + if !cmp.Equal(got, tc.want) { + m := json.Marshaler{} + gotJson, _ := m.MarshalToString(got) + wantJson, _ := m.MarshalToString(tc.want) + t.Errorf("got: \n%v\nwant:\n%v", gotJson, wantJson) + } + }) + } +} diff --git a/sdk/go/response.go b/sdk/go/response.go new file mode 100644 index 00000000000..086321cacdf --- /dev/null +++ b/sdk/go/response.go @@ -0,0 +1,81 @@ +package feast + +import ( + "fmt" + "github.com/gojek/feast/sdk/go/protos/feast/serving" + "github.com/gojek/feast/sdk/go/protos/feast/types" +) + +var ( + ErrLengthMismatch = "Length mismatch; number of na values (%d) not equal to number of features requested (%d)." + ErrFeatureNotFound = "Feature %s not found in response." + ErrTypeMismatch = "Requested output of type %s does not match type of feature value returned." +) + +// OnlineFeaturesResponse is a wrapper around serving.GetOnlineFeaturesResponse. +type OnlineFeaturesResponse struct { + RawResponse *serving.GetOnlineFeaturesResponse +} + +// Rows retrieves the result of the request as a list of Rows. +func (r OnlineFeaturesResponse) Rows() []Row { + rows := make([]Row, len(r.RawResponse.FieldValues)) + for i, val := range r.RawResponse.FieldValues { + rows[i] = val.Fields + } + return rows +} + +// Int64Arrays retrieves the result of the request as a list of int64 slices. Any missing values will be filled +// with the missing values provided. +func (r OnlineFeaturesResponse) Int64Arrays(order []string, fillNa []int64) ([][]int64, error) { + rows := make([][]int64, len(r.RawResponse.FieldValues)) + if len(fillNa) != len(order) { + return nil, fmt.Errorf(ErrLengthMismatch, len(fillNa), len(order)) + } + for i, val := range r.RawResponse.FieldValues { + rows[i] = make([]int64, len(order)) + for j, fname := range order { + fValue, exists := val.Fields[fname] + if !exists { + return nil, fmt.Errorf(ErrFeatureNotFound, fname) + } + val := fValue.GetVal() + if val == nil { + rows[i][j] = fillNa[j] + } else if int64Val, ok := val.(*types.Value_Int64Val); ok { + rows[i][j] = int64Val.Int64Val + } else { + return nil, fmt.Errorf(ErrTypeMismatch, "int64") + } + } + } + return rows, nil +} + +// Float64Arrays retrieves the result of the request as a list of float64 slices. Any missing values will be filled +// with the missing values provided. 
+func (r OnlineFeaturesResponse) Float64Arrays(order []string, fillNa []float64) ([][]float64, error) { + rows := make([][]float64, len(r.RawResponse.FieldValues)) + if len(fillNa) != len(order) { + return nil, fmt.Errorf(ErrLengthMismatch, len(fillNa), len(order)) + } + for i, val := range r.RawResponse.FieldValues { + rows[i] = make([]float64, len(order)) + for j, fname := range order { + fValue, exists := val.Fields[fname] + if !exists { + return nil, fmt.Errorf(ErrFeatureNotFound, fname) + } + val := fValue.GetVal() + if val == nil { + rows[i][j] = fillNa[j] + } else if doubleVal, ok := val.(*types.Value_DoubleVal); ok { + rows[i][j] = doubleVal.DoubleVal + } else { + return nil, fmt.Errorf(ErrTypeMismatch, "float64") + } + } + } + return rows, nil +} diff --git a/sdk/go/response_test.go b/sdk/go/response_test.go new file mode 100644 index 00000000000..9975457c147 --- /dev/null +++ b/sdk/go/response_test.go @@ -0,0 +1,99 @@ +package feast + +import ( + "fmt" + "github.com/gojek/feast/sdk/go/protos/feast/serving" + "github.com/gojek/feast/sdk/go/protos/feast/types" + "github.com/google/go-cmp/cmp" + "testing" +) + +var response = OnlineFeaturesResponse{ +RawResponse: &serving.GetOnlineFeaturesResponse{ + FieldValues: []*serving.GetOnlineFeaturesResponse_FieldValues{ + { + Fields: map[string]*types.Value{ + "fs:1:feature1": Int64Val(1), + "fs:1:feature2": &types.Value{}, + }, + }, + { + Fields: map[string]*types.Value{ + "fs:1:feature1": Int64Val(2), + "fs:1:feature2": Int64Val(2), + }, + }, + }, +}, +} + +func TestOnlineFeaturesResponseToRow(t *testing.T) { + actual := response.Rows() + expected := []Row{ + {"fs:1:feature1": Int64Val(1), "fs:1:feature2": &types.Value{}}, + {"fs:1:feature1": Int64Val(2), "fs:1:feature2": Int64Val(2)}, + } + if !cmp.Equal(actual, expected) { + t.Errorf("expected: %v, got: %v", expected, actual) + } +} + +func TestOnlineFeaturesResponseToInt64Array(t *testing.T) { + type args struct { + order []string + fillNa []int64 + } + tt := []struct { + name string + args args + want [][]int64 + wantErr bool + err error + }{ + { + name: "valid", + args: args{ + order: []string{"fs:1:feature2", "fs:1:feature1"}, + fillNa: []int64{-1, -1}, + }, + want: [][]int64{{-1, 1}, {2, 2}}, + wantErr: false, + }, + { + name: "length mismatch", + args: args{ + order: []string{"fs:1:feature2", "fs:1:feature1"}, + fillNa: []int64{-1}, + }, + want: nil, + wantErr: true, + err: fmt.Errorf(ErrLengthMismatch, 1, 2), + }, + { + name: "length mismatch", + args: args{ + order: []string{"fs:1:feature2", "fs:1:feature3"}, + fillNa: []int64{-1, -1}, + }, + want: nil, + wantErr: true, + err: fmt.Errorf(ErrFeatureNotFound, "fs:1:feature3"), + }, + } + for _, tc := range tt { + t.Run(tc.name, func(t *testing.T) { + got, err := response.Int64Arrays(tc.args.order, tc.args.fillNa) + if (err != nil) != tc.wantErr { + t.Errorf("error = %v, wantErr %v", err, tc.wantErr) + return + } + if tc.wantErr && err.Error() != tc.err.Error() { + t.Errorf("error = %v, expected err = %v", err, tc.err) + return + } + if !cmp.Equal(got, tc.want) { + t.Errorf("got: \n%v\nwant:\n%v", got, tc.want) + } + }) + } +} \ No newline at end of file diff --git a/sdk/go/types.go b/sdk/go/types.go new file mode 100644 index 00000000000..74606982a4a --- /dev/null +++ b/sdk/go/types.go @@ -0,0 +1,40 @@ +package feast + +import "github.com/gojek/feast/sdk/go/protos/feast/types" + +type Row map[string]*types.Value + +// StrVal is a int64 type feast value +func StrVal(val string) *types.Value { + return &types.Value{Val: 
&types.Value_StringVal{StringVal: val}} +} + +// Int32Val is a int64 type feast value +func Int32Val(val int32) *types.Value { + return &types.Value{Val: &types.Value_Int32Val{Int32Val: val}} +} + +// Int64Val is a int64 type feast value +func Int64Val(val int64) *types.Value { + return &types.Value{Val: &types.Value_Int64Val{Int64Val: val}} +} + +// FloatVal is a float32 type feast value +func FloatVal(val float32) *types.Value { + return &types.Value{Val: &types.Value_FloatVal{FloatVal: val}} +} + +// DoubleVal is a float64 type feast value +func DoubleVal(val float64) *types.Value { + return &types.Value{Val: &types.Value_DoubleVal{DoubleVal: val}} +} + +// BoolVal is a bool type feast value +func BoolVal(val bool) *types.Value { + return &types.Value{Val: &types.Value_BoolVal{BoolVal: val}} +} + +// BytesVal is a bytes type feast value +func BytesVal(val []byte) *types.Value { + return &types.Value{Val: &types.Value_BytesVal{BytesVal: val}} +} \ No newline at end of file diff --git a/sdk/java/pom.xml b/sdk/java/pom.xml new file mode 100644 index 00000000000..bd44d5af2cf --- /dev/null +++ b/sdk/java/pom.xml @@ -0,0 +1,91 @@ + + + 4.0.0 + + Feast SDK for Java + SDK for registering, storing, and retrieving features + feast-client + + + feast + feast-parent + ${revision} + ../.. + + + + + 5.5.2 + + + + + + io.grpc + grpc-netty-shaded + + + io.grpc + grpc-protobuf + + + io.grpc + grpc-stub + + + com.google.protobuf + protobuf-java-util + + + com.google.protobuf + protobuf-java + + + + + org.slf4j + slf4j-api + + + + + org.junit.jupiter + junit-jupiter-engine + ${junit.version} + test + + + org.junit.jupiter + junit-jupiter-params + ${junit.version} + test + + + org.apache.commons + commons-lang3 + 3.6 + compile + + + + + + + + org.xolstice.maven.plugins + protobuf-maven-plugin + + + + org.apache.maven.plugins + maven-javadoc-plugin + + true + + + + + + diff --git a/sdk/java/src/main/java/com/gojek/feast/v1alpha1/FeastClient.java b/sdk/java/src/main/java/com/gojek/feast/v1alpha1/FeastClient.java new file mode 100644 index 00000000000..9f93f56c9cf --- /dev/null +++ b/sdk/java/src/main/java/com/gojek/feast/v1alpha1/FeastClient.java @@ -0,0 +1,120 @@ +package com.gojek.feast.v1alpha1; + +import feast.serving.ServingAPIProto.GetFeastServingInfoRequest; +import feast.serving.ServingAPIProto.GetFeastServingInfoResponse; +import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; +import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; +import feast.serving.ServingAPIProto.FeatureSetRequest; +import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; +import feast.serving.ServingServiceGrpc; +import io.grpc.ManagedChannel; +import io.grpc.ManagedChannelBuilder; +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@SuppressWarnings("WeakerAccess") +public class FeastClient implements AutoCloseable { + Logger logger = LoggerFactory.getLogger(FeastClient.class); + + private static final int CHANNEL_SHUTDOWN_TIMEOUT_SEC = 5; + + private final ManagedChannel channel; + private final ServingServiceGrpc.ServingServiceBlockingStub stub; + + /** + * Create a client to access Feast + * + * @param host hostname or ip address of Feast serving GRPC server + * @param port port number of Feast serving GRPC server + * @return {@link FeastClient} + */ + public static FeastClient create(String host, int port) { + ManagedChannel channel = ManagedChannelBuilder.forAddress(host, 
port).usePlaintext().build(); + return new FeastClient(channel); + } + + public GetFeastServingInfoResponse getFeastServingInfo() { + return stub.getFeastServingInfo(GetFeastServingInfoRequest.newBuilder().build()); + } + + /** + * Get online features from Feast. + * + *

See {@link #getOnlineFeatures(List, List, boolean)} + * + * @param featureIds list of feature id to retrieve, feature id follows this format + * [feature_set_name]:[version]:[feature_name] + * @param rows list of {@link Row} to select the entities to retrieve the features for + * @return list of {@link Row} containing features + */ + public List getOnlineFeatures(List featureIds, List rows) { + return getOnlineFeatures(featureIds, rows, false); + } + + /** + * Get online features from Feast. + * + *

Example of retrieving online features for driver feature set, version 1, with features + * driver_id and driver_name + * + *

<pre>{@code
+   * FeastClient client = FeastClient.create("localhost", 6566);
+   * List<String> requestedFeatureIds = Arrays.asList("driver:1:driver_id", "driver:1:driver_name");
+   * List<Row> requestedRows =
+   *         Arrays.asList(Row.create().set("driver_id", 123), Row.create().set("driver_id", 456));
+   * List<Row> retrievedFeatures = client.getOnlineFeatures(requestedFeatureIds, requestedRows);
+   * retrievedFeatures.forEach(System.out::println);
+   * }</pre>
+ * + * @param featureIds list of feature id to retrieve, feature id follows this format + * [feature_set_name]:[version]:[feature_name] + * @param rows list of {@link Row} to select the entities to retrieve the features for + * @param omitEntitiesInResponse if true, the returned {@link Row} will not contain field and + * value for the entity + * @return list of {@link Row} containing features + */ + public List getOnlineFeatures( + List featureIds, List rows, boolean omitEntitiesInResponse) { + List featureSets = RequestUtil.createFeatureSets(featureIds); + List entityRows = + rows.stream() + .map( + row -> + EntityRow.newBuilder() + .setEntityTimestamp(row.getEntityTimestamp()) + .putAllFields(row.getFields()) + .build()) + .collect(Collectors.toList()); + + GetOnlineFeaturesResponse response = + stub.getOnlineFeatures( + GetOnlineFeaturesRequest.newBuilder() + .addAllFeatureSets(featureSets) + .addAllEntityRows(entityRows) + .setOmitEntitiesInResponse(omitEntitiesInResponse) + .build()); + + return response.getFieldValuesList().stream() + .map( + field -> { + Row row = Row.create(); + field.getFieldsMap().forEach(row::set); + return row; + }) + .collect(Collectors.toList()); + } + + private FeastClient(ManagedChannel channel) { + this.channel = channel; + stub = ServingServiceGrpc.newBlockingStub(channel); + } + + public void close() throws Exception { + if (channel != null) { + channel.shutdown().awaitTermination(CHANNEL_SHUTDOWN_TIMEOUT_SEC, TimeUnit.SECONDS); + } + } +} diff --git a/sdk/java/src/main/java/com/gojek/feast/v1alpha1/RequestUtil.java b/sdk/java/src/main/java/com/gojek/feast/v1alpha1/RequestUtil.java new file mode 100644 index 00000000000..082b1f3d2f8 --- /dev/null +++ b/sdk/java/src/main/java/com/gojek/feast/v1alpha1/RequestUtil.java @@ -0,0 +1,59 @@ +package com.gojek.feast.v1alpha1; + +import feast.serving.ServingAPIProto.FeatureSetRequest; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import org.apache.commons.lang3.tuple.ImmutablePair; +import org.apache.commons.lang3.tuple.Pair; + +@SuppressWarnings("WeakerAccess") +public class RequestUtil { + public static List createFeatureSets(List featureIds) { + if (featureIds == null) { + throw new IllegalArgumentException("featureIds cannot be null"); + } + + // featureSetMap is a map of pair of feature set name and version -> a list of feature names + Map, List> featureSetMap = new HashMap<>(); + + for (String featureId : featureIds) { + String[] parts = featureId.split(":"); + if (parts.length < 3) { + throw new IllegalArgumentException( + String.format( + "Feature id '%s' has invalid format. Expected format: ::.", + featureId)); + } + String featureSetName = parts[0]; + int featureSetVersion; + try { + featureSetVersion = Integer.parseInt(parts[1]); + } catch (NumberFormatException e) { + throw new IllegalArgumentException( + String.format( + "Feature id '%s' contains invalid version. 
Expected format: ::.", + parts[1])); + } + + Pair key = new ImmutablePair<>(featureSetName, featureSetVersion); + if (!featureSetMap.containsKey(key)) { + featureSetMap.put(key, new ArrayList<>()); + } + String featureName = parts[2]; + featureSetMap.get(key).add(featureName); + } + + return featureSetMap.entrySet().stream() + .map( + entry -> + FeatureSetRequest.newBuilder() + .setName(entry.getKey().getKey()) + .setVersion(entry.getKey().getValue()) + .addAllFeatureNames(entry.getValue()) + .build()) + .collect(Collectors.toList()); + } +} diff --git a/sdk/java/src/main/java/com/gojek/feast/v1alpha1/Row.java b/sdk/java/src/main/java/com/gojek/feast/v1alpha1/Row.java new file mode 100644 index 00000000000..78ea419f7c4 --- /dev/null +++ b/sdk/java/src/main/java/com/gojek/feast/v1alpha1/Row.java @@ -0,0 +1,128 @@ +package com.gojek.feast.v1alpha1; + +import com.google.protobuf.ByteString; +import com.google.protobuf.Timestamp; +import com.google.protobuf.util.Timestamps; +import feast.types.ValueProto.Value; +import feast.types.ValueProto.Value.ValCase; +import java.time.Instant; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +@SuppressWarnings("UnusedReturnValue") +public class Row { + private Timestamp entity_timestamp; + private Map fields; + + public static Row create() { + Row row = new Row(); + row.entity_timestamp = Timestamps.fromMillis(System.currentTimeMillis()); + row.fields = new HashMap<>(); + return row; + } + + public Row setEntityTimestamp(Instant timestamp) { + entity_timestamp = Timestamps.fromMillis(timestamp.toEpochMilli()); + return this; + } + + public Timestamp getEntityTimestamp() { + return entity_timestamp; + } + + public Row setEntityTimestamp(String dateTime) { + entity_timestamp = Timestamps.fromMillis(Instant.parse(dateTime).toEpochMilli()); + return this; + } + + public Row set(String fieldName, Object value) { + String valueType = value.getClass().getCanonicalName(); + switch (valueType) { + case "java.lang.Integer": + fields.put(fieldName, Value.newBuilder().setInt32Val((int) value).build()); + break; + case "java.lang.Long": + fields.put(fieldName, Value.newBuilder().setInt64Val((long) value).build()); + break; + case "java.lang.Float": + fields.put(fieldName, Value.newBuilder().setFloatVal((float) value).build()); + break; + case "java.lang.Double": + fields.put(fieldName, Value.newBuilder().setDoubleVal((double) value).build()); + break; + case "java.lang.String": + fields.put(fieldName, Value.newBuilder().setStringVal((String) value).build()); + break; + case "byte[]": + fields.put( + fieldName, Value.newBuilder().setBytesVal(ByteString.copyFrom((byte[]) value)).build()); + break; + case "feast.types.ValueProto.Value": + fields.put(fieldName, (Value) value); + break; + default: + throw new IllegalArgumentException( + String.format( + "Type '%s' is unsupported in Feast. 
Please use one of these value types: Integer, Long, Float, Double, String, byte[].", + valueType)); + } + return this; + } + + public Map getFields() { + return fields; + } + + public Integer getInt(String fieldName) { + return getValue(fieldName).map(Value::getInt32Val).orElse(null); + } + + public Long getLong(String fieldName) { + return getValue(fieldName).map(Value::getInt64Val).orElse(null); + } + + public Float getFloat(String fieldName) { + return getValue(fieldName).map(Value::getFloatVal).orElse(null); + } + + public Double getDouble(String fieldName) { + return getValue(fieldName).map(Value::getDoubleVal).orElse(null); + } + + public String getString(String fieldName) { + return getValue(fieldName).map(Value::getStringVal).orElse(null); + } + + public byte[] getByte(String fieldName) { + return getValue(fieldName).map(Value::getBytesVal).map(ByteString::toByteArray).orElse(null); + } + + @Override + public String toString() { + List parts = new ArrayList<>(); + fields.forEach( + (key, value) -> + parts.add( + key + + ":" + + (value.getValCase().equals(ValCase.VAL_NOT_SET) + ? "NULL" + : value.toString().trim()))); + return String.join(", ", parts); + } + + private Optional getValue(String fieldName) { + if (!fields.containsKey(fieldName)) { + throw new IllegalArgumentException( + String.format("Row does not contain field '%s'", fieldName)); + } + Value value = fields.get(fieldName); + if (value.getValCase().equals(ValCase.VAL_NOT_SET)) { + return Optional.empty(); + } + return Optional.of(value); + } +} diff --git a/sdk/java/src/main/proto/feast b/sdk/java/src/main/proto/feast new file mode 120000 index 00000000000..463e4045de1 --- /dev/null +++ b/sdk/java/src/main/proto/feast @@ -0,0 +1 @@ +../../../../../protos/feast \ No newline at end of file diff --git a/sdk/java/src/test/java/com/gojek/feast/v1alpha1/RequestUtilTest.java b/sdk/java/src/test/java/com/gojek/feast/v1alpha1/RequestUtilTest.java new file mode 100644 index 00000000000..76503bd308a --- /dev/null +++ b/sdk/java/src/test/java/com/gojek/feast/v1alpha1/RequestUtilTest.java @@ -0,0 +1,87 @@ +package com.gojek.feast.v1alpha1; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import com.google.protobuf.TextFormat; +import feast.serving.ServingAPIProto.FeatureSetRequest; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.stream.Stream; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.junit.jupiter.params.provider.NullSource; + +class RequestUtilTest { + + private static Stream provideValidFeatureIds() { + return Stream.of( + Arguments.of( + Collections.singletonList("driver:1:driver_id"), + Collections.singletonList( + FeatureSetRequest.newBuilder() + .setName("driver") + .setVersion(1) + .addFeatureNames("driver_id"))), + Arguments.of( + Arrays.asList("driver:1:driver_id", "driver:1:driver_name"), + Collections.singletonList( + FeatureSetRequest.newBuilder() + .setName("driver") + .setVersion(1) + .addAllFeatureNames(Arrays.asList("driver_id", "driver_name")) + .build())), + Arguments.of( + Arrays.asList("driver:1:driver_id", "driver:1:driver_name", "booking:2:booking_id"), + Arrays.asList( + FeatureSetRequest.newBuilder() + .setName("driver") + .setVersion(1) + .addAllFeatureNames(Arrays.asList("driver_id", "driver_name")) + .build(), + 
FeatureSetRequest.newBuilder() + .setName("booking") + .setVersion(2) + .addFeatureNames("booking_id") + .build()))); + } + + @ParameterizedTest + @MethodSource("provideValidFeatureIds") + void createFeatureSets_ShouldReturnFeatureSetsForValidFeatureIds( + List input, List expected) { + List actual = RequestUtil.createFeatureSets(input); + // Order of the actual and expected featureSets do no not matter + actual.sort(Comparator.comparing(FeatureSetRequest::getName)); + expected.sort(Comparator.comparing(FeatureSetRequest::getName)); + assertEquals(expected.size(), actual.size()); + for (int i = 0; i < expected.size(); i++) { + String expectedString = TextFormat.printer().printToString(expected.get(i)); + String actualString = TextFormat.printer().printToString(actual.get(i)); + assertEquals(expectedString, actualString); + } + } + + private static Stream provideInvalidFeatureIds() { + return Stream.of( + Arguments.of(Collections.singletonList("feature_set_only")), + Arguments.of(Collections.singletonList("missing:feature_name")), + Arguments.of(Collections.singletonList("invalid:version:value")), + Arguments.of(Collections.singletonList(""))); + } + + @ParameterizedTest + @MethodSource("provideInvalidFeatureIds") + void createFeatureSets_ShouldThrowExceptionForInvalidFeatureIds(List input) { + assertThrows(IllegalArgumentException.class, () -> RequestUtil.createFeatureSets(input)); + } + + @ParameterizedTest + @NullSource + void createFeatureSets_ShouldThrowExceptionForNullFeatureIds(List input) { + assertThrows(IllegalArgumentException.class, () -> RequestUtil.createFeatureSets(input)); + } +} diff --git a/sdk/python/Makefile b/sdk/python/Makefile new file mode 100644 index 00000000000..f940812ef98 --- /dev/null +++ b/sdk/python/Makefile @@ -0,0 +1,20 @@ +# +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +TEST_PATH=./ + +test: + pytest --verbose --color=yes $(TEST_PATH) diff --git a/sdk/python/feast/sdk/__init__.py b/sdk/python/__init__.py similarity index 100% rename from sdk/python/feast/sdk/__init__.py rename to sdk/python/__init__.py diff --git a/sdk/python/cli.py b/sdk/python/cli.py new file mode 100644 index 00000000000..652bc038854 --- /dev/null +++ b/sdk/python/cli.py @@ -0,0 +1,239 @@ +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
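+#
+# A hypothetical usage sketch, assuming this module is installed as the `feast` console
+# script (the subcommands below are the ones defined further down in this file):
+#
+#   feast config set core_url localhost:6565
+#   feast feature-sets list
+#   feast ingest --name driver --version 1 --filename ./driver_features.csv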
+ +import sys +import logging +import click +from feast import config as feast_config +from feast.client import Client +from feast.resource import ResourceFactory +from feast.feature_set import FeatureSet +import toml +import pkg_resources +from feast.loaders import file +import yaml +import json + +_logger = logging.getLogger(__name__) + +_common_options = [ + click.option("--core-url", help="Set Feast core URL to connect to"), + click.option("--serving-url", help="Set Feast serving URL to connect to"), +] + + +def common_options(func): + for option in reversed(_common_options): + func = option(func) + return func + + +@click.group() +def cli(): + pass + + +@cli.command() +@click.option( + "--client-only", "-c", is_flag=True, help="Print only the version of the CLI" +) +@common_options +def version(client_only: bool, **kwargs): + """ + Displays version and connectivity information + """ + + try: + feast_versions_dict = { + "sdk": {"version": str(pkg_resources.get_distribution("feast"))} + } + + if not client_only: + feast_client = Client( + core_url=feast_config.get_config_property_or_fail( + "core_url", cli_config=kwargs + ), + serving_url=feast_config.get_config_property_or_fail( + "serving_url", cli_config=kwargs + ), + ) + feast_versions_dict.update(feast_client.version()) + + print(json.dumps(feast_versions_dict)) + except Exception as e: + _logger.error("Error initializing backend store") + _logger.exception(e) + sys.exit(1) + + +@cli.group() +def config(): + """ + View and edit Feast properties + """ + pass + + +@config.command(name="list") +def config_list(): + """ + List Feast properties for the currently active configuration + """ + + try: + feast_config_string = toml.dumps(feast_config.get_or_create_config()) + if not feast_config_string.strip(): + print("Configuration has not been set") + else: + print(feast_config_string.replace('""', "").strip()) + except Exception as e: + _logger.error("Error occurred when reading Feast configuration file") + _logger.exception(e) + sys.exit(1) + + +@config.command(name="set") +@click.argument("prop") +@click.argument("value") +def config_set(prop, value): + """ + Set a Feast properties for the currently active configuration + """ + try: + feast_config.set_property(prop.strip(), value.strip()) + except Exception as e: + _logger.error("Error in reading config file") + _logger.exception(e) + sys.exit(1) + + +@cli.group(name="feature-sets") +def feature_set(): + """ + Create and manage feature sets + """ + pass + + +@feature_set.command() +def list(): + """ + List all feature sets + """ + feast_client = Client( + core_url=feast_config.get_config_property_or_fail("core_url") + ) # type: Client + + table = [] + for fs in feast_client.list_feature_sets(): + table.append([fs.name, fs.version]) + + from tabulate import tabulate + + print(tabulate(table, headers=["NAME", "VERSION"], tablefmt="plain")) + + +@feature_set.command() +@click.argument("name") +def create(name): + """ + Create a feature set + """ + feast_client = Client( + core_url=feast_config.get_config_property_or_fail("core_url") + ) # type: Client + + feast_client.apply(FeatureSet(name=name)) + + +@feature_set.command() +@click.argument("name", type=click.STRING) +@click.argument("version", type=click.INT) +def describe(name: str, version: int): + """ + Describe a feature set + """ + feast_client = Client( + core_url=feast_config.get_config_property_or_fail("core_url") + ) # type: Client + + fs = feast_client.get_feature_set(name=name, version=version) + if not fs: + print( + f'Feature 
set with name "{name}" and version "{version}" could not be found' + ) + return + + print(yaml.dump(yaml.safe_load(str(fs)), default_flow_style=False, sort_keys=False)) + + +@cli.command() +@click.option( + "--name", "-n", help="Feature set name to ingest data into", required=True +) +@click.option( + "--version", "-v", help="Feature set version to ingest data into", type=int +) +@click.option( + "--filename", + "-f", + help="Path to file to be ingested", + type=click.Path(exists=True), + required=True, +) +@click.option( + "--file-type", + "-t", + type=click.Choice(["CSV"], case_sensitive=False), + help="Type of file to ingest. Defaults to CSV.", +) +def ingest(name, version, filename, file_type): + """ + Ingest feature data into a feature set + """ + + feast_client = Client( + core_url=feast_config.get_config_property_or_fail("core_url") + ) # type: Client + + feature_set = feast_client.get_feature_set(name=name, version=version) + feature_set.ingest_file(file_path=filename) + + +@cli.command() +@click.option( + "--filename", + "-f", + help="Path to the configuration file that will be applied", + type=click.Path(exists=True), +) +def apply(filename): + """ + Apply a configuration to a resource by filename or stdin + """ + + resources = [ + ResourceFactory.get_resource(res_dict["kind"]).from_dict(res_dict) + for res_dict in file.yaml_loader(filename) + ] + + feast_client = Client( + core_url=feast_config.get_config_property_or_fail("core_url") + ) # type: Client + + feast_client.apply(resources) + + +if __name__ == "__main__": + cli() diff --git a/sdk/python/examples/quickstart/Quickstart.ipynb b/sdk/python/examples/quickstart/Quickstart.ipynb index e915983ac54..0e3e441eb05 100644 --- a/sdk/python/examples/quickstart/Quickstart.ipynb +++ b/sdk/python/examples/quickstart/Quickstart.ipynb @@ -263,8 +263,8 @@ "outputs": [], "source": [ "FEAST_CORE_URL = 'localhost:8433'\n", - "FEAST_SERVING_URL = 'feast-serving.sandbox.s.ds.golabs.io:8433'\n", - "STAGING_LOCATION = 'gs://zzz-bubub/'" + "FEAST_SERVING_URL = 'localhost:8433'\n", + "STAGING_LOCATION = 'gs://staging-location/'" ] }, { @@ -403,11 +403,11 @@ "entity: ride\n", "dataStores: {}\n", "\n", - "Staging file to remote path gs://zzz-bubub//tmp_ride_1559191176607.csv\n", + "Staging file to remote path gs://staging-location//tmp_ride_1559191176607.csv\n", "Submitting job with spec:\n", " type: file.csv\n", "sourceOptions:\n", - " path: gs://zzz-bubub//tmp_ride_1559191176607.csv\n", + " path: gs://staging-location//tmp_ride_1559191176607.csv\n", "entities:\n", "- ride\n", "schema:\n", @@ -691,9 +691,9 @@ ], "metadata": { "kernelspec": { - "display_name": "PyCharm (feast-python-sdk)", + "display_name": "Python 3", "language": "python", - "name": "pycharm-e7e8e038" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -705,7 +705,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.2" + "version": "3.7.3" }, "pycharm": { "stem_cell": { diff --git a/sdk/python/README.md b/sdk/python/examples/quickstart/feast_0.1_tutorial.md similarity index 98% rename from sdk/python/README.md rename to sdk/python/examples/quickstart/feast_0.1_tutorial.md index 0a97ae2f60f..007d89a19ce 100644 --- a/sdk/python/README.md +++ b/sdk/python/examples/quickstart/feast_0.1_tutorial.md @@ -110,7 +110,7 @@ Also, it's possible to retrieve only these features required for training, by specifying them in a `FeatureSet`: ```python -from feast.sdk.resources.feature_set import FeatureSet +from feature_set import 
FeatureSet training_fs = FeatureSet(entity="word", features=["word.count"]) diff --git a/sdk/python/feast/__init__.py b/sdk/python/feast/__init__.py index e69de29bb2d..bcd714d32b8 100644 --- a/sdk/python/feast/__init__.py +++ b/sdk/python/feast/__init__.py @@ -0,0 +1,6 @@ +from .client import Client +from .entity import Entity +from .feature_set import FeatureSet +from .feature import Feature +from .source import Source, KafkaSource +from .value_type import ValueType diff --git a/sdk/python/feast/client.py b/sdk/python/feast/client.py new file mode 100644 index 00000000000..18ce1ad381a --- /dev/null +++ b/sdk/python/feast/client.py @@ -0,0 +1,538 @@ +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import logging +import os +from collections import OrderedDict +from typing import Dict, Union +from typing import List +import grpc +import pandas as pd +from feast.loaders.ingest import ingest_kafka + +from feast.exceptions import format_grpc_exception +from feast.core.CoreService_pb2 import ( + GetFeastCoreVersionRequest, + ListFeatureSetsResponse, + ApplyFeatureSetRequest, + ListFeatureSetsRequest, + ApplyFeatureSetResponse, + GetFeatureSetRequest, + GetFeatureSetResponse, +) +from feast.core.CoreService_pb2_grpc import CoreServiceStub +from feast.feature_set import FeatureSet, Entity +from feast.job import Job +from feast.serving.ServingService_pb2 import ( + GetOnlineFeaturesRequest, + GetBatchFeaturesRequest, + GetFeastServingInfoRequest, + GetOnlineFeaturesResponse, + DatasetSource, + DataFormat, + FeatureSetRequest, + FeastServingType, +) +from feast.serving.ServingService_pb2_grpc import ServingServiceStub +from feast.serving.ServingService_pb2 import GetFeastServingInfoResponse +from urllib.parse import urlparse +import uuid +import numpy as np +import sys +from feast.loaders.file import export_dataframe_to_staging_location + +_logger = logging.getLogger(__name__) + +GRPC_CONNECTION_TIMEOUT_DEFAULT = 3 # type: int +GRPC_CONNECTION_TIMEOUT_APPLY = 600 # type: int +FEAST_SERVING_URL_ENV_KEY = "FEAST_SERVING_URL" # type: str +FEAST_CORE_URL_ENV_KEY = "FEAST_CORE_URL" # type: str +BATCH_FEATURE_REQUEST_WAIT_TIME_SECONDS = 300 +CPU_COUNT = os.cpu_count() # type: int + + +class Client: + def __init__( + self, core_url: str = None, serving_url: str = None, verbose: bool = False + ): + self._core_url = core_url + self._serving_url = serving_url + self._verbose = verbose + self.__core_channel: grpc.Channel = None + self.__serving_channel: grpc.Channel = None + self._core_service_stub: CoreServiceStub = None + self._serving_service_stub: ServingServiceStub = None + + @property + def core_url(self) -> str: + if self._core_url is not None: + return self._core_url + if os.getenv(FEAST_CORE_URL_ENV_KEY) is not None: + return os.getenv(FEAST_CORE_URL_ENV_KEY) + return "" + + @core_url.setter + def core_url(self, value: str): + self._core_url = value + + @property + def serving_url(self) -> str: + if self._serving_url is not None: + return self._serving_url + if 
os.getenv(FEAST_SERVING_URL_ENV_KEY) is not None: + return os.getenv(FEAST_SERVING_URL_ENV_KEY) + return "" + + @serving_url.setter + def serving_url(self, value: str): + self._serving_url = value + + def version(self): + """ + Returns version information from Feast Core and Feast Serving + :return: Dictionary containing Core and Serving versions and status + """ + + self._connect_core() + self._connect_serving() + + core_version = "" + serving_version = "" + core_status = "not connected" + serving_status = "not connected" + + try: + core_version = self._core_service_stub.GetFeastCoreVersion( + GetFeastCoreVersionRequest(), timeout=GRPC_CONNECTION_TIMEOUT_DEFAULT + ).version + core_status = "connected" + except grpc.RpcError as e: + print(format_grpc_exception("GetFeastCoreVersion", e.code(), e.details())) + + try: + serving_version = self._serving_service_stub.GetFeastServingInfo( + GetFeastServingInfoRequest(), timeout=GRPC_CONNECTION_TIMEOUT_DEFAULT + ).version + serving_status = "connected" + except grpc.RpcError as e: + print(format_grpc_exception("GetFeastServingInfo", e.code(), e.details())) + + return { + "core": { + "url": self.core_url, + "version": core_version, + "status": core_status, + }, + "serving": { + "url": self.serving_url, + "version": serving_version, + "status": serving_status, + }, + } + + def _connect_core(self, skip_if_connected=True): + """ + Connect to Core API + """ + if skip_if_connected and self._core_service_stub: + return + + if not self.core_url: + raise ValueError("Please set Feast Core URL.") + + if self.__core_channel is None: + self.__core_channel = grpc.insecure_channel(self.core_url) + + try: + grpc.channel_ready_future(self.__core_channel).result( + timeout=GRPC_CONNECTION_TIMEOUT_DEFAULT + ) + except grpc.FutureTimeoutError: + print( + f"Connection timed out while attempting to connect to Feast Core gRPC server {self.core_url}" + ) + sys.exit(1) + else: + self._core_service_stub = CoreServiceStub(self.__core_channel) + + def _connect_serving(self, skip_if_connected=True): + """ + Connect to Serving API + """ + + if skip_if_connected and self._serving_service_stub: + return + + if not self.serving_url: + raise ValueError("Please set Feast Serving URL.") + + if self.__serving_channel is None: + self.__serving_channel = grpc.insecure_channel(self.serving_url) + + try: + grpc.channel_ready_future(self.__serving_channel).result( + timeout=GRPC_CONNECTION_TIMEOUT_DEFAULT + ) + except grpc.FutureTimeoutError: + print( + f"Connection timed out while attempting to connect to Feast Serving gRPC server {self.serving_url} " + ) + sys.exit(1) + else: + self._serving_service_stub = ServingServiceStub(self.__serving_channel) + + def apply(self, feature_sets: Union[List[FeatureSet], FeatureSet]): + """ + Idempotently registers feature set(s) with Feast Core. Either a single feature set or a list can be provided. 
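A minimal usage sketch for `apply`, assuming a Feast Core instance is reachable at a hypothetical `localhost:6565`; it only uses constructor arguments that appear elsewhere in this diff (`name` for `FeatureSet`, `core_url` for `Client`):

```python
from feast import Client, FeatureSet

# Hypothetical Core endpoint; adjust to your deployment.
client = Client(core_url="localhost:6565")

# Register a single feature set ...
client.apply(FeatureSet(name="customer"))

# ... or several at once; apply() accepts either form and is idempotent.
client.apply([FeatureSet(name="customer"), FeatureSet(name="driver")])
```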
+ :param feature_sets: Union[List[FeatureSet], FeatureSet] + """ + if not isinstance(feature_sets, list): + feature_sets = [feature_sets] + for feature_set in feature_sets: + if isinstance(feature_set, FeatureSet): + self._apply_feature_set(feature_set) + continue + raise ValueError( + f"Could not determine feature set type to apply {feature_set}" + ) + + def _apply_feature_set(self, feature_set: FeatureSet): + self._connect_core() + feature_set._client = self + + valid, message = feature_set.is_valid() + if not valid: + raise Exception(message) + try: + apply_fs_response = self._core_service_stub.ApplyFeatureSet( + ApplyFeatureSetRequest(feature_set=feature_set.to_proto()), + timeout=GRPC_CONNECTION_TIMEOUT_APPLY, + ) # type: ApplyFeatureSetResponse + applied_fs = FeatureSet.from_proto(apply_fs_response.feature_set) + + if apply_fs_response.status == ApplyFeatureSetResponse.Status.CREATED: + print( + f'Feature set updated/created: "{applied_fs.name}:{applied_fs.version}".' + ) + feature_set._update_from_feature_set(applied_fs, is_dirty=False) + return + if apply_fs_response.status == ApplyFeatureSetResponse.Status.NO_CHANGE: + print(f"No change detected in feature set {feature_set.name}") + return + except grpc.RpcError as e: + print(format_grpc_exception("ApplyFeatureSet", e.code(), e.details())) + + def list_feature_sets(self) -> List[FeatureSet]: + """ + Retrieve a list of feature sets from Feast Core + :return: Returns a list of feature sets + """ + self._connect_core() + + try: + # Get latest feature sets from Feast Core + feature_set_protos = self._core_service_stub.ListFeatureSets( + ListFeatureSetsRequest() + ) # type: ListFeatureSetsResponse + except grpc.RpcError as e: + raise Exception( + format_grpc_exception("ListFeatureSets", e.code(), e.details()) + ) + + # Store list of feature sets + feature_sets = [] + for feature_set_proto in feature_set_protos.feature_sets: + feature_set = FeatureSet.from_proto(feature_set_proto) + feature_set._client = self + feature_sets.append(feature_set) + return feature_sets + + def get_feature_set( + self, name: str, version: int = None, fail_if_missing: bool = False + ) -> Union[FeatureSet, None]: + """ + Retrieve a single feature set from Feast Core + :param name: (str) Name of feature set + :param version: (int) Version of feature set + :param fail_if_missing: (bool) Throws an exception if the feature set is not + found + :return: Returns a single feature set + + """ + self._connect_core() + try: + get_feature_set_response = self._core_service_stub.GetFeatureSet( + GetFeatureSetRequest(name=name.strip(), version=str(version)) + ) # type: GetFeatureSetResponse + feature_set = get_feature_set_response.feature_set + except grpc.RpcError as e: + print(format_grpc_exception("GetFeatureSet", e.code(), e.details())) + else: + if feature_set is not None: + return FeatureSet.from_proto(feature_set) + + if fail_if_missing: + raise Exception( + f'Could not find feature set with name "{name}" and ' + f'version "{version}"' + ) + + def list_entities(self) -> Dict[str, Entity]: + """ + Returns a dictionary of entities across all feature sets + :return: Dictionary of entity name to Entity + """ + entities_dict = OrderedDict() + for fs in self.list_feature_sets(): + for entity in fs.entities: + entities_dict[entity.name] = entity + return entities_dict + + def get_batch_features( + self, feature_ids: List[str], entity_rows: pd.DataFrame + ) -> Job: + """ + Retrieves historical features from a Feast Serving deployment. 
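The feature id convention used throughout this client is `"feature_set_name:version:feature_name"`. The sketch below is a standalone illustration of how such ids group into per-feature-set requests, mirroring the `_build_feature_set_request` helper added further down in this file; the `group_feature_ids` name is only for this example:

```python
from collections import defaultdict

def group_feature_ids(feature_ids):
    # Ids follow "feature_set_name:version:feature_name".
    grouped = defaultdict(list)
    for feature_id in feature_ids:
        parts = feature_id.split(":")
        if len(parts) != 3:
            raise ValueError(
                f"Could not parse feature id {feature_id}; expected feature_set:version:feature"
            )
        feature_set, version, feature = parts
        grouped[(feature_set, int(version))].append(feature)
    return dict(grouped)

print(group_feature_ids(["customer:1:bookings_7d", "customer:1:rating"]))
# -> {('customer', 1): ['bookings_7d', 'rating']}
```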
+ + Args: + feature_ids: List of feature ids that will be returned for each entity. + Each feature id should have the following format "feature_set_name:version:feature_name". + + entity_rows: Pandas dataframe containing entities and a 'datetime' column. Each entity in + a feature set must be present as a column in this dataframe. The datetime column must + contain timestamps in datetime64 format + + Returns: + Feast batch retrieval job: feast.job.Job + + Example usage: + ============================================================ + >>> from feast import Client + >>> from datetime import datetime + >>> + >>> feast_client = Client(core_url="localhost:6565", serving_url="localhost:6566") + >>> feature_ids = ["customer:1:bookings_7d"] + >>> entity_rows = pd.DataFrame( + >>> { + >>> "datetime": [pd.datetime.now() for _ in range(3)], + >>> "customer": [1001, 1002, 1003], + >>> } + >>> ) + >>> feature_retrieval_job = feast_client.get_batch_features(feature_ids, entity_rows) + >>> df = feature_retrieval_job.to_dataframe() + >>> print(df) + """ + + self._connect_serving() + + try: + fs_request = _build_feature_set_request(feature_ids) + + # Validate entity rows based on entities in Feast Core + self._validate_entity_rows_for_batch_retrieval(entity_rows, fs_request) + + # We want the timestamp column naming to be consistent with the + # rest of Feast + entity_rows.columns = [ + "event_timestamp" if col == "datetime" else col + for col in entity_rows.columns + ] + + # Remove timezone from datetime column + if isinstance( + entity_rows["event_timestamp"].dtype, + pd.core.dtypes.dtypes.DatetimeTZDtype, + ): + entity_rows["event_timestamp"] = pd.DatetimeIndex( + entity_rows["event_timestamp"] + ).tz_localize(None) + + # Retrieve serving information to determine store type and staging location + serving_info = self._serving_service_stub.GetFeastServingInfo( + GetFeastServingInfoRequest(), timeout=GRPC_CONNECTION_TIMEOUT_DEFAULT + ) # type: GetFeastServingInfoResponse + + if serving_info.type != FeastServingType.FEAST_SERVING_TYPE_BATCH: + raise Exception( + f'You are connected to a store "{self._serving_url}" which does not support batch retrieval' + ) + + # Export and upload entity row dataframe to staging location provided by Feast + staged_file = export_dataframe_to_staging_location( + entity_rows, serving_info.job_staging_location + ) # type: str + + request = GetBatchFeaturesRequest( + feature_sets=fs_request, + dataset_source=DatasetSource( + file_source=DatasetSource.FileSource( + file_uris=[staged_file], data_format=DataFormat.DATA_FORMAT_AVRO + ) + ), + ) + + # Retrieve Feast Job object to manage life cycle of retrieval + response = self._serving_service_stub.GetBatchFeatures(request) + return Job(response.job, self._serving_service_stub) + + except grpc.RpcError as e: + print(format_grpc_exception("GetBatchFeatures", e.code(), e.details())) + + def _validate_entity_rows_for_batch_retrieval( + self, entity_rows, feature_sets_request + ): + """ + Validate whether an entity_row dataframe contains the correct information for batch retrieval + :param entity_rows: Pandas dataframe containing entities and datetime column. Each entity in a feature set + must be present as a column in this dataframe. 
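For reference, a minimal `entity_rows` dataframe that satisfies the validation implemented below might look like this; the `customer` entity column is illustrative and stands in for whatever entities your feature sets define:

```python
import pandas as pd
from datetime import datetime

# One "datetime" column plus one column per entity of the requested feature sets.
entity_rows = pd.DataFrame(
    {
        "datetime": [datetime.now(), datetime.now(), datetime.now()],
        "customer": [1001, 1002, 1003],
    }
)
```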
+ :param feature_sets_request: Feature sets that will + """ + + # Ensure datetime column exists + if "datetime" not in entity_rows.columns: + raise ValueError( + f'Entity rows does not contain "datetime" column in columns {entity_rows.columns}' + ) + + # Validate dataframe columns based on feature set entities + for feature_set in feature_sets_request: + fs = self.get_feature_set( + name=feature_set.name, version=feature_set.version + ) + if fs is None: + raise ValueError( + f'Feature set "{feature_set.name}:{feature_set.version}" could not be found' + ) + for entity_type in fs.entities: + if entity_type.name not in entity_rows.columns: + raise ValueError( + f'Dataframe does not contain entity "{entity_type.name}" column in columns "{entity_rows.columns}"' + ) + + def get_online_features( + self, + feature_ids: List[str], + entity_rows: List[GetOnlineFeaturesRequest.EntityRow], + ) -> GetOnlineFeaturesResponse: + """ + Retrieves the latest online feature data from Feast Serving + :param feature_ids: List of feature Ids in the following format + [feature_set_name]:[version]:[feature_name] + example: ["feature_set_1:6:my_feature_1", + "feature_set_1:6:my_feature_2",] + + :param entity_rows: List of GetFeaturesRequest.EntityRow where each row + contains entities. Timestamp should not be set for + online retrieval. All entity types within a feature + set must be provided for each entity key. + :return: Returns a list of maps where each item in the list contains + the latest feature values for the provided entities + """ + self._connect_serving() + + try: + response = self._serving_service_stub.GetOnlineFeatures( + GetOnlineFeaturesRequest( + feature_sets=_build_feature_set_request(feature_ids), + entity_rows=entity_rows, + ) + ) # type: GetOnlineFeaturesResponse + except grpc.RpcError as e: + print(format_grpc_exception("GetOnlineFeatures", e.code(), e.details())) + else: + return response + + def ingest( + self, + feature_set: Union[str, FeatureSet], + dataframe: pd.DataFrame, + version: int = None, + force_update: bool = False, + max_workers: int = CPU_COUNT, + disable_progress_bar: bool = False, + chunk_size: int = 5000, + ): + """ + Loads data into Feast for a specific feature set. 
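A hedged usage sketch for ingestion, assuming a feature set named `customer` (a placeholder) is already registered with a Kafka source and Core is reachable at a hypothetical `localhost:6565`:

```python
import pandas as pd
from datetime import datetime
from feast import Client

client = Client(core_url="localhost:6565")  # assumed endpoint

# Column names must line up with the feature set's entities and features;
# "customer_id" and "bookings_7d" are illustrative.
df = pd.DataFrame(
    {
        "datetime": [datetime.now()] * 3,
        "customer_id": [1001, 1002, 1003],
        "bookings_7d": [4, 0, 7],
    }
)

# force_update lets the client infer and apply schema changes from the dataframe
# before pushing rows to the feature set's Kafka source.
client.ingest("customer", df, force_update=True)
```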
+ + :param feature_set: (str, FeatureSet) Feature set object or the + string name of the feature set (without a version) + :param dataframe: + Pandas dataframe to load into Feast for this feature set + :param + version: (int) Version of the feature set for which this ingestion + should happen + :param force_update: (bool) Automatically update + feature set based on data frame before ingesting data + :param max_workers: Number of + worker processes to use to encode the dataframe + :param + disable_progress_bar: Disable progress bar during ingestion + :param + chunk_size: Number of rows per chunk to encode before ingesting to + Feast + """ + if isinstance(feature_set, FeatureSet): + name = feature_set.name + if version is None: + version = feature_set.version + elif isinstance(feature_set, str): + name = feature_set + else: + raise Exception(f"Feature set name must be provided") + + feature_set = self.get_feature_set(name, version, fail_if_missing=True) + + # Update the feature set based on dataframe schema + if force_update: + feature_set.infer_fields_from_df( + dataframe, discard_unused_fields=True, replace_existing_features=True + ) + self.apply(feature_set) + + if feature_set.source.source_type == "Kafka": + ingest_kafka( + feature_set=feature_set, + dataframe=dataframe, + max_workers=max_workers, + disable_progress_bar=disable_progress_bar, + chunk_size=chunk_size, + ) + else: + raise Exception( + f"Could not determine source type for feature set " + f'"{feature_set.name}" with source type ' + f'"{feature_set.source.source_type}"' + ) + + +def _build_feature_set_request(feature_ids: List[str]) -> List[FeatureSetRequest]: + """ + Builds a list of FeatureSet objects from feature set ids in order to retrieve feature data from Feast Serving + """ + feature_set_request = dict() # type: Dict[str, FeatureSetRequest] + for feature_id in feature_ids: + fid_parts = feature_id.split(":") + if len(fid_parts) == 3: + feature_set, version, feature = fid_parts + else: + raise ValueError( + f"Could not parse feature id ${feature_id}, needs 3 colons" + ) + + if feature_set not in feature_set_request: + feature_set_request[feature_set] = FeatureSetRequest( + name=feature_set, version=int(version) + ) + feature_set_request[feature_set].feature_names.append(feature) + return list(feature_set_request.values()) diff --git a/sdk/python/feast/config.py b/sdk/python/feast/config.py new file mode 100644 index 00000000000..8eaa167755a --- /dev/null +++ b/sdk/python/feast/config.py @@ -0,0 +1,150 @@ +# +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
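As a quick illustration of how this configuration module is typically driven, either directly or through the `config set` command defined in cli.py earlier in this diff, assuming the default file locations:

```python
from feast import config as feast_config

# Writes to ~/.feast/config.toml by default; the directory name can be
# overridden with the FEAST_CONFIG environment variable.
feast_config.set_property("core_url", "localhost:6565")
feast_config.set_property("serving_url", "localhost:6566")

# Values set this way are read back by get_config_property_or_fail whenever the
# CLI is not given explicit --core-url / --serving-url options.
print(feast_config.get_config_property_or_fail("core_url"))
```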
+# + +from os.path import expanduser, join +import logging +import os +import sys +from typing import Dict +from urllib.parse import urlparse +from urllib.parse import ParseResult + +import toml + +_logger = logging.getLogger(__name__) + +feast_configuration_properties = {"core_url": "URL", "serving_url": "URL"} + +CONFIGURATION_FILE_DIR = os.environ.get("FEAST_CONFIG", ".feast") +CONFIGURATION_FILE_NAME = "config.toml" + + +def get_or_create_config() -> Dict: + """ + Creates or gets the Feast users active configuration + :return: dictionary of Feast properties + """ + + user_config_file_dir, user_config_file_path = _get_config_file_locations() + user_config_file_dir = user_config_file_dir.rstrip("/") + "/" + if not os.path.exists(os.path.dirname(user_config_file_dir)): + os.makedirs(os.path.dirname(user_config_file_dir)) + + if not os.path.isfile(user_config_file_path): + _save_config(user_config_file_path, _props_to_dict()) + + try: + return toml.load(user_config_file_path) + except FileNotFoundError: + _logger.error( + "Could not find Feast configuration file " + user_config_file_path + ) + sys.exit(1) + except toml.decoder.TomlDecodeError: + _logger.error( + "Could not decode Feast configuration file " + user_config_file_path + ) + sys.exit(1) + except Exception as e: + _logger.error(e) + sys.exit(1) + + +def set_property(prop: str, value: str): + """ + Sets a single property in the Feast users local configuration file + :param prop: Feast property name + :param value: Feast property value + """ + + if _is_valid_property(prop, value): + active_feast_config = get_or_create_config() + active_feast_config[prop] = value + _, user_config_file_path = _get_config_file_locations() + _save_config(user_config_file_path, active_feast_config) + print("Updated property [%s]" % prop) + else: + _logger.error("Invalid property selected") + sys.exit(1) + + +def get_config_property_or_fail(prop: str, cli_config: Dict[str, str] = None): + if ( + isinstance(cli_config, dict) + and prop in cli_config + and cli_config[prop] is not None + ): + return cli_config[prop] + + active_feast_config = get_or_create_config() + if _is_valid_property(prop, active_feast_config[prop]): + return active_feast_config[prop] + _logger.error("Could not load Feast property from configuration: %s" % prop) + sys.exit(1) + + +def _props_to_dict() -> Dict[str, str]: + prop_dict = {} + for prop in feast_configuration_properties: + prop_dict[prop] = "" + return prop_dict + + +def _is_valid_property(prop: str, value: str) -> bool: + """ + Validates both a Feast property as well as value + :param prop: Feast property name + :param value: Feast property value + :return: Returns True if property and value are valid + """ + + if prop not in feast_configuration_properties: + _logger.error("You are trying to set an invalid property") + sys.exit(1) + + prop_type = feast_configuration_properties[prop] + + if prop_type == "URL": + if "//" not in value: + value = "%s%s" % ("grpc://", value) + parsed_value = urlparse(value) # type: ParseResult + if parsed_value.netloc: + return True + + _logger.error("The property you are trying to set could not be identified") + sys.exit(1) + + +def _save_config(user_config_file_path: str, config_string: Dict[str, str]): + """ + Saves Feast configuration + :param user_config_file_path: Local file system path to save configuration + :param config_string: Contents in dictionary format to save to path + """ + + try: + with open(user_config_file_path, "w+") as f: + toml.dump(config_string, f) + except Exception as 
e: + _logger.error("Could not update configuration file for Feast") + print(e) + sys.exit(1) + + +def _get_config_file_locations() -> (str, str): + user_config_file_dir = join(expanduser("~"), CONFIGURATION_FILE_DIR) + user_config_file_path = join(user_config_file_dir, CONFIGURATION_FILE_NAME) + return user_config_file_dir, user_config_file_path diff --git a/sdk/python/feast/constants.py b/sdk/python/feast/constants.py new file mode 100644 index 00000000000..9b001ac4067 --- /dev/null +++ b/sdk/python/feast/constants.py @@ -0,0 +1,17 @@ +# +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +DATETIME_COLUMN = "datetime" # type: str diff --git a/sdk/python/feast/core/CoreService_pb2.py b/sdk/python/feast/core/CoreService_pb2.py index 93212793dff..61781edbad6 100644 --- a/sdk/python/feast/core/CoreService_pb2.py +++ b/sdk/python/feast/core/CoreService_pb2.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: feast/core/CoreService.proto @@ -12,36 +13,88 @@ _sym_db = _symbol_database.Default() -from feast.specs import EntitySpec_pb2 as feast_dot_specs_dot_EntitySpec__pb2 -from feast.specs import FeatureSpec_pb2 as feast_dot_specs_dot_FeatureSpec__pb2 -from feast.specs import FeatureGroupSpec_pb2 as feast_dot_specs_dot_FeatureGroupSpec__pb2 -from feast.specs import StorageSpec_pb2 as feast_dot_specs_dot_StorageSpec__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from feast.core import FeatureSet_pb2 as feast_dot_core_dot_FeatureSet__pb2 +from feast.core import Store_pb2 as feast_dot_core_dot_Store__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name='feast/core/CoreService.proto', package='feast.core', syntax='proto3', - serialized_options=_b('\n\nfeast.coreB\020CoreServiceProtoZ5github.com/gojek/feast/protos/generated/go/feast/core'), - serialized_pb=_b('\n\x1c\x66\x65\x61st/core/CoreService.proto\x12\nfeast.core\x1a\x1c\x66\x65\x61st/specs/EntitySpec.proto\x1a\x1d\x66\x65\x61st/specs/FeatureSpec.proto\x1a\"feast/specs/FeatureGroupSpec.proto\x1a\x1d\x66\x65\x61st/specs/StorageSpec.proto\x1a\x1bgoogle/protobuf/empty.proto\"\xc9\x05\n\x10\x43oreServiceTypes\x1a!\n\x12GetEntitiesRequest\x12\x0b\n\x03ids\x18\x01 \x03(\t\x1a@\n\x13GetEntitiesResponse\x12)\n\x08\x65ntities\x18\x01 \x03(\x0b\x32\x17.feast.specs.EntitySpec\x1a\x41\n\x14ListEntitiesResponse\x12)\n\x08\x65ntities\x18\x01 \x03(\x0b\x32\x17.feast.specs.EntitySpec\x1a!\n\x12GetFeaturesRequest\x12\x0b\n\x03ids\x18\x01 \x03(\t\x1a\x41\n\x13GetFeaturesResponse\x12*\n\x08\x66\x65\x61tures\x18\x01 \x03(\x0b\x32\x18.feast.specs.FeatureSpec\x1a\x42\n\x14ListFeaturesResponse\x12*\n\x08\x66\x65\x61tures\x18\x01 \x03(\x0b\x32\x18.feast.specs.FeatureSpec\x1a \n\x11GetStorageRequest\x12\x0b\n\x03ids\x18\x01 \x03(\t\x1a\x44\n\x12GetStorageResponse\x12.\n\x0cstorageSpecs\x18\x01 \x03(\x0b\x32\x18.feast.specs.StorageSpec\x1a\x45\n\x13ListStorageResponse\x12.\n\x0cstorageSpecs\x18\x01 
\x03(\x0b\x32\x18.feast.specs.StorageSpec\x1a)\n\x13\x41pplyEntityResponse\x12\x12\n\nentityName\x18\x01 \x01(\t\x1a)\n\x14\x41pplyFeatureResponse\x12\x11\n\tfeatureId\x18\x01 \x01(\t\x1a\x33\n\x19\x41pplyFeatureGroupResponse\x12\x16\n\x0e\x66\x65\x61tureGroupId\x18\x01 \x01(\t\x1a)\n\x14\x41pplyStorageResponse\x12\x11\n\tstorageId\x18\x01 \x01(\t2\x83\x08\n\x0b\x43oreService\x12r\n\x0bGetEntities\x12/.feast.core.CoreServiceTypes.GetEntitiesRequest\x1a\x30.feast.core.CoreServiceTypes.GetEntitiesResponse\"\x00\x12[\n\x0cListEntities\x12\x16.google.protobuf.Empty\x1a\x31.feast.core.CoreServiceTypes.ListEntitiesResponse\"\x00\x12r\n\x0bGetFeatures\x12/.feast.core.CoreServiceTypes.GetFeaturesRequest\x1a\x30.feast.core.CoreServiceTypes.GetFeaturesResponse\"\x00\x12[\n\x0cListFeatures\x12\x16.google.protobuf.Empty\x1a\x31.feast.core.CoreServiceTypes.ListFeaturesResponse\"\x00\x12o\n\nGetStorage\x12..feast.core.CoreServiceTypes.GetStorageRequest\x1a/.feast.core.CoreServiceTypes.GetStorageResponse\"\x00\x12Y\n\x0bListStorage\x12\x16.google.protobuf.Empty\x1a\x30.feast.core.CoreServiceTypes.ListStorageResponse\"\x00\x12]\n\x0c\x41pplyFeature\x12\x18.feast.specs.FeatureSpec\x1a\x31.feast.core.CoreServiceTypes.ApplyFeatureResponse\"\x00\x12l\n\x11\x41pplyFeatureGroup\x12\x1d.feast.specs.FeatureGroupSpec\x1a\x36.feast.core.CoreServiceTypes.ApplyFeatureGroupResponse\"\x00\x12Z\n\x0b\x41pplyEntity\x12\x17.feast.specs.EntitySpec\x1a\x30.feast.core.CoreServiceTypes.ApplyEntityResponse\"\x00\x12]\n\x0c\x41pplyStorage\x12\x18.feast.specs.StorageSpec\x1a\x31.feast.core.CoreServiceTypes.ApplyStorageResponse\"\x00\x42U\n\nfeast.coreB\x10\x43oreServiceProtoZ5github.com/gojek/feast/protos/generated/go/feast/coreb\x06proto3') + serialized_options=_b('\n\nfeast.coreB\020CoreServiceProtoZ/github.com/gojek/feast/sdk/go/protos/feast/core'), + serialized_pb=_b('\n\x1c\x66\x65\x61st/core/CoreService.proto\x12\nfeast.core\x1a\x1b\x66\x65\x61st/core/FeatureSet.proto\x1a\x16\x66\x65\x61st/core/Store.proto\"5\n\x14GetFeatureSetRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\"H\n\x15GetFeatureSetResponse\x12/\n\x0b\x66\x65\x61ture_set\x18\x01 \x01(\x0b\x32\x1a.feast.core.FeatureSetSpec\"\x94\x01\n\x16ListFeatureSetsRequest\x12\x39\n\x06\x66ilter\x18\x01 \x01(\x0b\x32).feast.core.ListFeatureSetsRequest.Filter\x1a?\n\x06\x46ilter\x12\x18\n\x10\x66\x65\x61ture_set_name\x18\x01 \x01(\t\x12\x1b\n\x13\x66\x65\x61ture_set_version\x18\x02 \x01(\t\"K\n\x17ListFeatureSetsResponse\x12\x30\n\x0c\x66\x65\x61ture_sets\x18\x01 \x03(\x0b\x32\x1a.feast.core.FeatureSetSpec\"a\n\x11ListStoresRequest\x12\x34\n\x06\x66ilter\x18\x01 \x01(\x0b\x32$.feast.core.ListStoresRequest.Filter\x1a\x16\n\x06\x46ilter\x12\x0c\n\x04name\x18\x01 \x01(\t\"6\n\x12ListStoresResponse\x12 \n\x05store\x18\x01 \x03(\x0b\x32\x11.feast.core.Store\"I\n\x16\x41pplyFeatureSetRequest\x12/\n\x0b\x66\x65\x61ture_set\x18\x01 \x01(\x0b\x32\x1a.feast.core.FeatureSetSpec\"\xb7\x01\n\x17\x41pplyFeatureSetResponse\x12/\n\x0b\x66\x65\x61ture_set\x18\x01 \x01(\x0b\x32\x1a.feast.core.FeatureSetSpec\x12:\n\x06status\x18\x02 \x01(\x0e\x32*.feast.core.ApplyFeatureSetResponse.Status\"/\n\x06Status\x12\r\n\tNO_CHANGE\x10\x00\x12\x0b\n\x07\x43REATED\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"\x1c\n\x1aGetFeastCoreVersionRequest\".\n\x1bGetFeastCoreVersionResponse\x12\x0f\n\x07version\x18\x01 \x01(\t\"6\n\x12UpdateStoreRequest\x12 \n\x05store\x18\x01 \x01(\x0b\x32\x11.feast.core.Store\"\x95\x01\n\x13UpdateStoreResponse\x12 \n\x05store\x18\x01 
\x01(\x0b\x32\x11.feast.core.Store\x12\x36\n\x06status\x18\x02 \x01(\x0e\x32&.feast.core.UpdateStoreResponse.Status\"$\n\x06Status\x12\r\n\tNO_CHANGE\x10\x00\x12\x0b\n\x07UPDATED\x10\x01\x32\xa0\x04\n\x0b\x43oreService\x12\x66\n\x13GetFeastCoreVersion\x12&.feast.core.GetFeastCoreVersionRequest\x1a\'.feast.core.GetFeastCoreVersionResponse\x12T\n\rGetFeatureSet\x12 .feast.core.GetFeatureSetRequest\x1a!.feast.core.GetFeatureSetResponse\x12Z\n\x0fListFeatureSets\x12\".feast.core.ListFeatureSetsRequest\x1a#.feast.core.ListFeatureSetsResponse\x12K\n\nListStores\x12\x1d.feast.core.ListStoresRequest\x1a\x1e.feast.core.ListStoresResponse\x12Z\n\x0f\x41pplyFeatureSet\x12\".feast.core.ApplyFeatureSetRequest\x1a#.feast.core.ApplyFeatureSetResponse\x12N\n\x0bUpdateStore\x12\x1e.feast.core.UpdateStoreRequest\x1a\x1f.feast.core.UpdateStoreResponseBO\n\nfeast.coreB\x10\x43oreServiceProtoZ/github.com/gojek/feast/sdk/go/protos/feast/coreb\x06proto3') , - dependencies=[feast_dot_specs_dot_EntitySpec__pb2.DESCRIPTOR,feast_dot_specs_dot_FeatureSpec__pb2.DESCRIPTOR,feast_dot_specs_dot_FeatureGroupSpec__pb2.DESCRIPTOR,feast_dot_specs_dot_StorageSpec__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,]) + dependencies=[feast_dot_core_dot_FeatureSet__pb2.DESCRIPTOR,feast_dot_core_dot_Store__pb2.DESCRIPTOR,]) +_APPLYFEATURESETRESPONSE_STATUS = _descriptor.EnumDescriptor( + name='Status', + full_name='feast.core.ApplyFeatureSetResponse.Status', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NO_CHANGE', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CREATED', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ERROR', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=821, + serialized_end=868, +) +_sym_db.RegisterEnumDescriptor(_APPLYFEATURESETRESPONSE_STATUS) + +_UPDATESTORERESPONSE_STATUS = _descriptor.EnumDescriptor( + name='Status', + full_name='feast.core.UpdateStoreResponse.Status', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NO_CHANGE', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UPDATED', index=1, number=1, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=1118, + serialized_end=1154, +) +_sym_db.RegisterEnumDescriptor(_UPDATESTORERESPONSE_STATUS) + -_CORESERVICETYPES_GETENTITIESREQUEST = _descriptor.Descriptor( - name='GetEntitiesRequest', - full_name='feast.core.CoreServiceTypes.GetEntitiesRequest', +_GETFEATURESETREQUEST = _descriptor.Descriptor( + name='GetFeatureSetRequest', + full_name='feast.core.GetFeatureSetRequest', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='ids', full_name='feast.core.CoreServiceTypes.GetEntitiesRequest.ids', index=0, - number=1, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], + name='name', full_name='feast.core.GetFeatureSetRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='version', 
full_name='feast.core.GetFeatureSetRequest.version', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), @@ -57,21 +110,22 @@ extension_ranges=[], oneofs=[ ], - serialized_start=222, - serialized_end=255, + serialized_start=97, + serialized_end=150, ) -_CORESERVICETYPES_GETENTITIESRESPONSE = _descriptor.Descriptor( - name='GetEntitiesResponse', - full_name='feast.core.CoreServiceTypes.GetEntitiesResponse', + +_GETFEATURESETRESPONSE = _descriptor.Descriptor( + name='GetFeatureSetResponse', + full_name='feast.core.GetFeatureSetResponse', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='entities', full_name='feast.core.CoreServiceTypes.GetEntitiesResponse.entities', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], + name='feature_set', full_name='feast.core.GetFeatureSetResponse.feature_set', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), @@ -87,21 +141,29 @@ extension_ranges=[], oneofs=[ ], - serialized_start=257, - serialized_end=321, + serialized_start=152, + serialized_end=224, ) -_CORESERVICETYPES_LISTENTITIESRESPONSE = _descriptor.Descriptor( - name='ListEntitiesResponse', - full_name='feast.core.CoreServiceTypes.ListEntitiesResponse', + +_LISTFEATURESETSREQUEST_FILTER = _descriptor.Descriptor( + name='Filter', + full_name='feast.core.ListFeatureSetsRequest.Filter', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='entities', full_name='feast.core.CoreServiceTypes.ListEntitiesResponse.entities', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], + name='feature_set_name', full_name='feast.core.ListFeatureSetsRequest.Filter.feature_set_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='feature_set_version', full_name='feast.core.ListFeatureSetsRequest.Filter.feature_set_version', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), @@ -117,28 +179,28 @@ extension_ranges=[], oneofs=[ ], - serialized_start=323, - serialized_end=388, + serialized_start=312, + serialized_end=375, ) -_CORESERVICETYPES_GETFEATURESREQUEST = _descriptor.Descriptor( - name='GetFeaturesRequest', - full_name='feast.core.CoreServiceTypes.GetFeaturesRequest', +_LISTFEATURESETSREQUEST = _descriptor.Descriptor( + name='ListFeatureSetsRequest', + full_name='feast.core.ListFeatureSetsRequest', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='ids', full_name='feast.core.CoreServiceTypes.GetFeaturesRequest.ids', index=0, - number=1, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], + name='filter', 
full_name='feast.core.ListFeatureSetsRequest.filter', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], - nested_types=[], + nested_types=[_LISTFEATURESETSREQUEST_FILTER, ], enum_types=[ ], serialized_options=None, @@ -147,19 +209,20 @@ extension_ranges=[], oneofs=[ ], - serialized_start=390, - serialized_end=423, + serialized_start=227, + serialized_end=375, ) -_CORESERVICETYPES_GETFEATURESRESPONSE = _descriptor.Descriptor( - name='GetFeaturesResponse', - full_name='feast.core.CoreServiceTypes.GetFeaturesResponse', + +_LISTFEATURESETSRESPONSE = _descriptor.Descriptor( + name='ListFeatureSetsResponse', + full_name='feast.core.ListFeatureSetsResponse', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='features', full_name='feast.core.CoreServiceTypes.GetFeaturesResponse.features', index=0, + name='feature_sets', full_name='feast.core.ListFeatureSetsResponse.feature_sets', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, @@ -177,21 +240,22 @@ extension_ranges=[], oneofs=[ ], - serialized_start=425, - serialized_end=490, + serialized_start=377, + serialized_end=452, ) -_CORESERVICETYPES_LISTFEATURESRESPONSE = _descriptor.Descriptor( - name='ListFeaturesResponse', - full_name='feast.core.CoreServiceTypes.ListFeaturesResponse', + +_LISTSTORESREQUEST_FILTER = _descriptor.Descriptor( + name='Filter', + full_name='feast.core.ListStoresRequest.Filter', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='features', full_name='feast.core.CoreServiceTypes.ListFeaturesResponse.features', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], + name='name', full_name='feast.core.ListStoresRequest.Filter.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), @@ -207,28 +271,28 @@ extension_ranges=[], oneofs=[ ], - serialized_start=492, - serialized_end=558, + serialized_start=529, + serialized_end=551, ) -_CORESERVICETYPES_GETSTORAGEREQUEST = _descriptor.Descriptor( - name='GetStorageRequest', - full_name='feast.core.CoreServiceTypes.GetStorageRequest', +_LISTSTORESREQUEST = _descriptor.Descriptor( + name='ListStoresRequest', + full_name='feast.core.ListStoresRequest', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='ids', full_name='feast.core.CoreServiceTypes.GetStorageRequest.ids', index=0, - number=1, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], + name='filter', full_name='feast.core.ListStoresRequest.filter', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], - nested_types=[], + nested_types=[_LISTSTORESREQUEST_FILTER, ], enum_types=[ ], serialized_options=None, @@ -237,19 +301,20 @@ extension_ranges=[], oneofs=[ ], - serialized_start=560, - serialized_end=592, + 
serialized_start=454, + serialized_end=551, ) -_CORESERVICETYPES_GETSTORAGERESPONSE = _descriptor.Descriptor( - name='GetStorageResponse', - full_name='feast.core.CoreServiceTypes.GetStorageResponse', + +_LISTSTORESRESPONSE = _descriptor.Descriptor( + name='ListStoresResponse', + full_name='feast.core.ListStoresResponse', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='storageSpecs', full_name='feast.core.CoreServiceTypes.GetStorageResponse.storageSpecs', index=0, + name='store', full_name='feast.core.ListStoresResponse.store', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, @@ -267,21 +332,22 @@ extension_ranges=[], oneofs=[ ], - serialized_start=594, - serialized_end=662, + serialized_start=553, + serialized_end=607, ) -_CORESERVICETYPES_LISTSTORAGERESPONSE = _descriptor.Descriptor( - name='ListStorageResponse', - full_name='feast.core.CoreServiceTypes.ListStorageResponse', + +_APPLYFEATURESETREQUEST = _descriptor.Descriptor( + name='ApplyFeatureSetRequest', + full_name='feast.core.ApplyFeatureSetRequest', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='storageSpecs', full_name='feast.core.CoreServiceTypes.ListStorageResponse.storageSpecs', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], + name='feature_set', full_name='feast.core.ApplyFeatureSetRequest.feature_set', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), @@ -297,21 +363,29 @@ extension_ranges=[], oneofs=[ ], - serialized_start=664, - serialized_end=733, + serialized_start=609, + serialized_end=682, ) -_CORESERVICETYPES_APPLYENTITYRESPONSE = _descriptor.Descriptor( - name='ApplyEntityResponse', - full_name='feast.core.CoreServiceTypes.ApplyEntityResponse', + +_APPLYFEATURESETRESPONSE = _descriptor.Descriptor( + name='ApplyFeatureSetResponse', + full_name='feast.core.ApplyFeatureSetResponse', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='entityName', full_name='feast.core.CoreServiceTypes.ApplyEntityResponse.entityName', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), + name='feature_set', full_name='feast.core.ApplyFeatureSetResponse.feature_set', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='status', full_name='feast.core.ApplyFeatureSetResponse.status', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), @@ -320,6 +394,7 @@ ], nested_types=[], enum_types=[ + _APPLYFEATURESETRESPONSE_STATUS, ], serialized_options=None, is_extendable=False, @@ -327,24 +402,18 @@ extension_ranges=[], oneofs=[ ], - serialized_start=735, - serialized_end=776, + serialized_start=685, + serialized_end=868, ) -_CORESERVICETYPES_APPLYFEATURERESPONSE = _descriptor.Descriptor( - 
name='ApplyFeatureResponse', - full_name='feast.core.CoreServiceTypes.ApplyFeatureResponse', + +_GETFEASTCOREVERSIONREQUEST = _descriptor.Descriptor( + name='GetFeastCoreVersionRequest', + full_name='feast.core.GetFeastCoreVersionRequest', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ - _descriptor.FieldDescriptor( - name='featureId', full_name='feast.core.CoreServiceTypes.ApplyFeatureResponse.featureId', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -357,19 +426,20 @@ extension_ranges=[], oneofs=[ ], - serialized_start=778, - serialized_end=819, + serialized_start=870, + serialized_end=898, ) -_CORESERVICETYPES_APPLYFEATUREGROUPRESPONSE = _descriptor.Descriptor( - name='ApplyFeatureGroupResponse', - full_name='feast.core.CoreServiceTypes.ApplyFeatureGroupResponse', + +_GETFEASTCOREVERSIONRESPONSE = _descriptor.Descriptor( + name='GetFeastCoreVersionResponse', + full_name='feast.core.GetFeastCoreVersionResponse', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='featureGroupId', full_name='feast.core.CoreServiceTypes.ApplyFeatureGroupResponse.featureGroupId', index=0, + name='version', full_name='feast.core.GetFeastCoreVersionResponse.version', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, @@ -387,21 +457,22 @@ extension_ranges=[], oneofs=[ ], - serialized_start=821, - serialized_end=872, + serialized_start=900, + serialized_end=946, ) -_CORESERVICETYPES_APPLYSTORAGERESPONSE = _descriptor.Descriptor( - name='ApplyStorageResponse', - full_name='feast.core.CoreServiceTypes.ApplyStorageResponse', + +_UPDATESTOREREQUEST = _descriptor.Descriptor( + name='UpdateStoreRequest', + full_name='feast.core.UpdateStoreRequest', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='storageId', full_name='feast.core.CoreServiceTypes.ApplyStorageResponse.storageId', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), + name='store', full_name='feast.core.UpdateStoreRequest.store', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), @@ -417,22 +488,38 @@ extension_ranges=[], oneofs=[ ], - serialized_start=874, - serialized_end=915, + serialized_start=948, + serialized_end=1002, ) -_CORESERVICETYPES = _descriptor.Descriptor( - name='CoreServiceTypes', - full_name='feast.core.CoreServiceTypes', + +_UPDATESTORERESPONSE = _descriptor.Descriptor( + name='UpdateStoreResponse', + full_name='feast.core.UpdateStoreResponse', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ + _descriptor.FieldDescriptor( + name='store', full_name='feast.core.UpdateStoreResponse.store', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='status', 
full_name='feast.core.UpdateStoreResponse.status', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], - nested_types=[_CORESERVICETYPES_GETENTITIESREQUEST, _CORESERVICETYPES_GETENTITIESRESPONSE, _CORESERVICETYPES_LISTENTITIESRESPONSE, _CORESERVICETYPES_GETFEATURESREQUEST, _CORESERVICETYPES_GETFEATURESRESPONSE, _CORESERVICETYPES_LISTFEATURESRESPONSE, _CORESERVICETYPES_GETSTORAGEREQUEST, _CORESERVICETYPES_GETSTORAGERESPONSE, _CORESERVICETYPES_LISTSTORAGERESPONSE, _CORESERVICETYPES_APPLYENTITYRESPONSE, _CORESERVICETYPES_APPLYFEATURERESPONSE, _CORESERVICETYPES_APPLYFEATUREGROUPRESPONSE, _CORESERVICETYPES_APPLYSTORAGERESPONSE, ], + nested_types=[], enum_types=[ + _UPDATESTORERESPONSE_STATUS, ], serialized_options=None, is_extendable=False, @@ -440,142 +527,138 @@ extension_ranges=[], oneofs=[ ], - serialized_start=202, - serialized_end=915, + serialized_start=1005, + serialized_end=1154, ) -_CORESERVICETYPES_GETENTITIESREQUEST.containing_type = _CORESERVICETYPES -_CORESERVICETYPES_GETENTITIESRESPONSE.fields_by_name['entities'].message_type = feast_dot_specs_dot_EntitySpec__pb2._ENTITYSPEC -_CORESERVICETYPES_GETENTITIESRESPONSE.containing_type = _CORESERVICETYPES -_CORESERVICETYPES_LISTENTITIESRESPONSE.fields_by_name['entities'].message_type = feast_dot_specs_dot_EntitySpec__pb2._ENTITYSPEC -_CORESERVICETYPES_LISTENTITIESRESPONSE.containing_type = _CORESERVICETYPES -_CORESERVICETYPES_GETFEATURESREQUEST.containing_type = _CORESERVICETYPES -_CORESERVICETYPES_GETFEATURESRESPONSE.fields_by_name['features'].message_type = feast_dot_specs_dot_FeatureSpec__pb2._FEATURESPEC -_CORESERVICETYPES_GETFEATURESRESPONSE.containing_type = _CORESERVICETYPES -_CORESERVICETYPES_LISTFEATURESRESPONSE.fields_by_name['features'].message_type = feast_dot_specs_dot_FeatureSpec__pb2._FEATURESPEC -_CORESERVICETYPES_LISTFEATURESRESPONSE.containing_type = _CORESERVICETYPES -_CORESERVICETYPES_GETSTORAGEREQUEST.containing_type = _CORESERVICETYPES -_CORESERVICETYPES_GETSTORAGERESPONSE.fields_by_name['storageSpecs'].message_type = feast_dot_specs_dot_StorageSpec__pb2._STORAGESPEC -_CORESERVICETYPES_GETSTORAGERESPONSE.containing_type = _CORESERVICETYPES -_CORESERVICETYPES_LISTSTORAGERESPONSE.fields_by_name['storageSpecs'].message_type = feast_dot_specs_dot_StorageSpec__pb2._STORAGESPEC -_CORESERVICETYPES_LISTSTORAGERESPONSE.containing_type = _CORESERVICETYPES -_CORESERVICETYPES_APPLYENTITYRESPONSE.containing_type = _CORESERVICETYPES -_CORESERVICETYPES_APPLYFEATURERESPONSE.containing_type = _CORESERVICETYPES -_CORESERVICETYPES_APPLYFEATUREGROUPRESPONSE.containing_type = _CORESERVICETYPES -_CORESERVICETYPES_APPLYSTORAGERESPONSE.containing_type = _CORESERVICETYPES -DESCRIPTOR.message_types_by_name['CoreServiceTypes'] = _CORESERVICETYPES +_GETFEATURESETRESPONSE.fields_by_name['feature_set'].message_type = feast_dot_core_dot_FeatureSet__pb2._FEATURESETSPEC +_LISTFEATURESETSREQUEST_FILTER.containing_type = _LISTFEATURESETSREQUEST +_LISTFEATURESETSREQUEST.fields_by_name['filter'].message_type = _LISTFEATURESETSREQUEST_FILTER +_LISTFEATURESETSRESPONSE.fields_by_name['feature_sets'].message_type = feast_dot_core_dot_FeatureSet__pb2._FEATURESETSPEC +_LISTSTORESREQUEST_FILTER.containing_type = _LISTSTORESREQUEST +_LISTSTORESREQUEST.fields_by_name['filter'].message_type = _LISTSTORESREQUEST_FILTER 
+_LISTSTORESRESPONSE.fields_by_name['store'].message_type = feast_dot_core_dot_Store__pb2._STORE +_APPLYFEATURESETREQUEST.fields_by_name['feature_set'].message_type = feast_dot_core_dot_FeatureSet__pb2._FEATURESETSPEC +_APPLYFEATURESETRESPONSE.fields_by_name['feature_set'].message_type = feast_dot_core_dot_FeatureSet__pb2._FEATURESETSPEC +_APPLYFEATURESETRESPONSE.fields_by_name['status'].enum_type = _APPLYFEATURESETRESPONSE_STATUS +_APPLYFEATURESETRESPONSE_STATUS.containing_type = _APPLYFEATURESETRESPONSE +_UPDATESTOREREQUEST.fields_by_name['store'].message_type = feast_dot_core_dot_Store__pb2._STORE +_UPDATESTORERESPONSE.fields_by_name['store'].message_type = feast_dot_core_dot_Store__pb2._STORE +_UPDATESTORERESPONSE.fields_by_name['status'].enum_type = _UPDATESTORERESPONSE_STATUS +_UPDATESTORERESPONSE_STATUS.containing_type = _UPDATESTORERESPONSE +DESCRIPTOR.message_types_by_name['GetFeatureSetRequest'] = _GETFEATURESETREQUEST +DESCRIPTOR.message_types_by_name['GetFeatureSetResponse'] = _GETFEATURESETRESPONSE +DESCRIPTOR.message_types_by_name['ListFeatureSetsRequest'] = _LISTFEATURESETSREQUEST +DESCRIPTOR.message_types_by_name['ListFeatureSetsResponse'] = _LISTFEATURESETSRESPONSE +DESCRIPTOR.message_types_by_name['ListStoresRequest'] = _LISTSTORESREQUEST +DESCRIPTOR.message_types_by_name['ListStoresResponse'] = _LISTSTORESRESPONSE +DESCRIPTOR.message_types_by_name['ApplyFeatureSetRequest'] = _APPLYFEATURESETREQUEST +DESCRIPTOR.message_types_by_name['ApplyFeatureSetResponse'] = _APPLYFEATURESETRESPONSE +DESCRIPTOR.message_types_by_name['GetFeastCoreVersionRequest'] = _GETFEASTCOREVERSIONREQUEST +DESCRIPTOR.message_types_by_name['GetFeastCoreVersionResponse'] = _GETFEASTCOREVERSIONRESPONSE +DESCRIPTOR.message_types_by_name['UpdateStoreRequest'] = _UPDATESTOREREQUEST +DESCRIPTOR.message_types_by_name['UpdateStoreResponse'] = _UPDATESTORERESPONSE _sym_db.RegisterFileDescriptor(DESCRIPTOR) -CoreServiceTypes = _reflection.GeneratedProtocolMessageType('CoreServiceTypes', (_message.Message,), dict( - - GetEntitiesRequest = _reflection.GeneratedProtocolMessageType('GetEntitiesRequest', (_message.Message,), dict( - DESCRIPTOR = _CORESERVICETYPES_GETENTITIESREQUEST, - __module__ = 'feast.core.CoreService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.CoreServiceTypes.GetEntitiesRequest) - )) - , - - GetEntitiesResponse = _reflection.GeneratedProtocolMessageType('GetEntitiesResponse', (_message.Message,), dict( - DESCRIPTOR = _CORESERVICETYPES_GETENTITIESRESPONSE, - __module__ = 'feast.core.CoreService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.CoreServiceTypes.GetEntitiesResponse) - )) +GetFeatureSetRequest = _reflection.GeneratedProtocolMessageType('GetFeatureSetRequest', (_message.Message,), { + 'DESCRIPTOR' : _GETFEATURESETREQUEST, + '__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.GetFeatureSetRequest) + }) +_sym_db.RegisterMessage(GetFeatureSetRequest) + +GetFeatureSetResponse = _reflection.GeneratedProtocolMessageType('GetFeatureSetResponse', (_message.Message,), { + 'DESCRIPTOR' : _GETFEATURESETRESPONSE, + '__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.GetFeatureSetResponse) + }) +_sym_db.RegisterMessage(GetFeatureSetResponse) + +ListFeatureSetsRequest = _reflection.GeneratedProtocolMessageType('ListFeatureSetsRequest', (_message.Message,), { + + 'Filter' : _reflection.GeneratedProtocolMessageType('Filter', (_message.Message,), { + 'DESCRIPTOR' : 
_LISTFEATURESETSREQUEST_FILTER, + '__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.ListFeatureSetsRequest.Filter) + }) , - - ListEntitiesResponse = _reflection.GeneratedProtocolMessageType('ListEntitiesResponse', (_message.Message,), dict( - DESCRIPTOR = _CORESERVICETYPES_LISTENTITIESRESPONSE, - __module__ = 'feast.core.CoreService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.CoreServiceTypes.ListEntitiesResponse) - )) + 'DESCRIPTOR' : _LISTFEATURESETSREQUEST, + '__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.ListFeatureSetsRequest) + }) +_sym_db.RegisterMessage(ListFeatureSetsRequest) +_sym_db.RegisterMessage(ListFeatureSetsRequest.Filter) + +ListFeatureSetsResponse = _reflection.GeneratedProtocolMessageType('ListFeatureSetsResponse', (_message.Message,), { + 'DESCRIPTOR' : _LISTFEATURESETSRESPONSE, + '__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.ListFeatureSetsResponse) + }) +_sym_db.RegisterMessage(ListFeatureSetsResponse) + +ListStoresRequest = _reflection.GeneratedProtocolMessageType('ListStoresRequest', (_message.Message,), { + + 'Filter' : _reflection.GeneratedProtocolMessageType('Filter', (_message.Message,), { + 'DESCRIPTOR' : _LISTSTORESREQUEST_FILTER, + '__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.ListStoresRequest.Filter) + }) , - - GetFeaturesRequest = _reflection.GeneratedProtocolMessageType('GetFeaturesRequest', (_message.Message,), dict( - DESCRIPTOR = _CORESERVICETYPES_GETFEATURESREQUEST, - __module__ = 'feast.core.CoreService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.CoreServiceTypes.GetFeaturesRequest) - )) - , - - GetFeaturesResponse = _reflection.GeneratedProtocolMessageType('GetFeaturesResponse', (_message.Message,), dict( - DESCRIPTOR = _CORESERVICETYPES_GETFEATURESRESPONSE, - __module__ = 'feast.core.CoreService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.CoreServiceTypes.GetFeaturesResponse) - )) - , - - ListFeaturesResponse = _reflection.GeneratedProtocolMessageType('ListFeaturesResponse', (_message.Message,), dict( - DESCRIPTOR = _CORESERVICETYPES_LISTFEATURESRESPONSE, - __module__ = 'feast.core.CoreService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.CoreServiceTypes.ListFeaturesResponse) - )) - , - - GetStorageRequest = _reflection.GeneratedProtocolMessageType('GetStorageRequest', (_message.Message,), dict( - DESCRIPTOR = _CORESERVICETYPES_GETSTORAGEREQUEST, - __module__ = 'feast.core.CoreService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.CoreServiceTypes.GetStorageRequest) - )) - , - - GetStorageResponse = _reflection.GeneratedProtocolMessageType('GetStorageResponse', (_message.Message,), dict( - DESCRIPTOR = _CORESERVICETYPES_GETSTORAGERESPONSE, - __module__ = 'feast.core.CoreService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.CoreServiceTypes.GetStorageResponse) - )) - , - - ListStorageResponse = _reflection.GeneratedProtocolMessageType('ListStorageResponse', (_message.Message,), dict( - DESCRIPTOR = _CORESERVICETYPES_LISTSTORAGERESPONSE, - __module__ = 'feast.core.CoreService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.CoreServiceTypes.ListStorageResponse) - )) - , - - ApplyEntityResponse = _reflection.GeneratedProtocolMessageType('ApplyEntityResponse', (_message.Message,), dict( - DESCRIPTOR = _CORESERVICETYPES_APPLYENTITYRESPONSE, - __module__ = 
'feast.core.CoreService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.CoreServiceTypes.ApplyEntityResponse) - )) - , - - ApplyFeatureResponse = _reflection.GeneratedProtocolMessageType('ApplyFeatureResponse', (_message.Message,), dict( - DESCRIPTOR = _CORESERVICETYPES_APPLYFEATURERESPONSE, - __module__ = 'feast.core.CoreService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.CoreServiceTypes.ApplyFeatureResponse) - )) - , - - ApplyFeatureGroupResponse = _reflection.GeneratedProtocolMessageType('ApplyFeatureGroupResponse', (_message.Message,), dict( - DESCRIPTOR = _CORESERVICETYPES_APPLYFEATUREGROUPRESPONSE, - __module__ = 'feast.core.CoreService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.CoreServiceTypes.ApplyFeatureGroupResponse) - )) - , - - ApplyStorageResponse = _reflection.GeneratedProtocolMessageType('ApplyStorageResponse', (_message.Message,), dict( - DESCRIPTOR = _CORESERVICETYPES_APPLYSTORAGERESPONSE, - __module__ = 'feast.core.CoreService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.CoreServiceTypes.ApplyStorageResponse) - )) - , - DESCRIPTOR = _CORESERVICETYPES, - __module__ = 'feast.core.CoreService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.CoreServiceTypes) - )) -_sym_db.RegisterMessage(CoreServiceTypes) -_sym_db.RegisterMessage(CoreServiceTypes.GetEntitiesRequest) -_sym_db.RegisterMessage(CoreServiceTypes.GetEntitiesResponse) -_sym_db.RegisterMessage(CoreServiceTypes.ListEntitiesResponse) -_sym_db.RegisterMessage(CoreServiceTypes.GetFeaturesRequest) -_sym_db.RegisterMessage(CoreServiceTypes.GetFeaturesResponse) -_sym_db.RegisterMessage(CoreServiceTypes.ListFeaturesResponse) -_sym_db.RegisterMessage(CoreServiceTypes.GetStorageRequest) -_sym_db.RegisterMessage(CoreServiceTypes.GetStorageResponse) -_sym_db.RegisterMessage(CoreServiceTypes.ListStorageResponse) -_sym_db.RegisterMessage(CoreServiceTypes.ApplyEntityResponse) -_sym_db.RegisterMessage(CoreServiceTypes.ApplyFeatureResponse) -_sym_db.RegisterMessage(CoreServiceTypes.ApplyFeatureGroupResponse) -_sym_db.RegisterMessage(CoreServiceTypes.ApplyStorageResponse) + 'DESCRIPTOR' : _LISTSTORESREQUEST, + '__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.ListStoresRequest) + }) +_sym_db.RegisterMessage(ListStoresRequest) +_sym_db.RegisterMessage(ListStoresRequest.Filter) + +ListStoresResponse = _reflection.GeneratedProtocolMessageType('ListStoresResponse', (_message.Message,), { + 'DESCRIPTOR' : _LISTSTORESRESPONSE, + '__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.ListStoresResponse) + }) +_sym_db.RegisterMessage(ListStoresResponse) + +ApplyFeatureSetRequest = _reflection.GeneratedProtocolMessageType('ApplyFeatureSetRequest', (_message.Message,), { + 'DESCRIPTOR' : _APPLYFEATURESETREQUEST, + '__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.ApplyFeatureSetRequest) + }) +_sym_db.RegisterMessage(ApplyFeatureSetRequest) + +ApplyFeatureSetResponse = _reflection.GeneratedProtocolMessageType('ApplyFeatureSetResponse', (_message.Message,), { + 'DESCRIPTOR' : _APPLYFEATURESETRESPONSE, + '__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.ApplyFeatureSetResponse) + }) +_sym_db.RegisterMessage(ApplyFeatureSetResponse) + +GetFeastCoreVersionRequest = _reflection.GeneratedProtocolMessageType('GetFeastCoreVersionRequest', (_message.Message,), { + 'DESCRIPTOR' : _GETFEASTCOREVERSIONREQUEST, + 
'__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.GetFeastCoreVersionRequest) + }) +_sym_db.RegisterMessage(GetFeastCoreVersionRequest) + +GetFeastCoreVersionResponse = _reflection.GeneratedProtocolMessageType('GetFeastCoreVersionResponse', (_message.Message,), { + 'DESCRIPTOR' : _GETFEASTCOREVERSIONRESPONSE, + '__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.GetFeastCoreVersionResponse) + }) +_sym_db.RegisterMessage(GetFeastCoreVersionResponse) + +UpdateStoreRequest = _reflection.GeneratedProtocolMessageType('UpdateStoreRequest', (_message.Message,), { + 'DESCRIPTOR' : _UPDATESTOREREQUEST, + '__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.UpdateStoreRequest) + }) +_sym_db.RegisterMessage(UpdateStoreRequest) + +UpdateStoreResponse = _reflection.GeneratedProtocolMessageType('UpdateStoreResponse', (_message.Message,), { + 'DESCRIPTOR' : _UPDATESTORERESPONSE, + '__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.UpdateStoreResponse) + }) +_sym_db.RegisterMessage(UpdateStoreResponse) DESCRIPTOR._options = None @@ -586,97 +669,61 @@ file=DESCRIPTOR, index=0, serialized_options=None, - serialized_start=918, - serialized_end=1945, + serialized_start=1157, + serialized_end=1701, methods=[ _descriptor.MethodDescriptor( - name='GetEntities', - full_name='feast.core.CoreService.GetEntities', + name='GetFeastCoreVersion', + full_name='feast.core.CoreService.GetFeastCoreVersion', index=0, containing_service=None, - input_type=_CORESERVICETYPES_GETENTITIESREQUEST, - output_type=_CORESERVICETYPES_GETENTITIESRESPONSE, + input_type=_GETFEASTCOREVERSIONREQUEST, + output_type=_GETFEASTCOREVERSIONRESPONSE, serialized_options=None, ), _descriptor.MethodDescriptor( - name='ListEntities', - full_name='feast.core.CoreService.ListEntities', + name='GetFeatureSet', + full_name='feast.core.CoreService.GetFeatureSet', index=1, containing_service=None, - input_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - output_type=_CORESERVICETYPES_LISTENTITIESRESPONSE, + input_type=_GETFEATURESETREQUEST, + output_type=_GETFEATURESETRESPONSE, serialized_options=None, ), _descriptor.MethodDescriptor( - name='GetFeatures', - full_name='feast.core.CoreService.GetFeatures', + name='ListFeatureSets', + full_name='feast.core.CoreService.ListFeatureSets', index=2, containing_service=None, - input_type=_CORESERVICETYPES_GETFEATURESREQUEST, - output_type=_CORESERVICETYPES_GETFEATURESRESPONSE, + input_type=_LISTFEATURESETSREQUEST, + output_type=_LISTFEATURESETSRESPONSE, serialized_options=None, ), _descriptor.MethodDescriptor( - name='ListFeatures', - full_name='feast.core.CoreService.ListFeatures', + name='ListStores', + full_name='feast.core.CoreService.ListStores', index=3, containing_service=None, - input_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - output_type=_CORESERVICETYPES_LISTFEATURESRESPONSE, + input_type=_LISTSTORESREQUEST, + output_type=_LISTSTORESRESPONSE, serialized_options=None, ), _descriptor.MethodDescriptor( - name='GetStorage', - full_name='feast.core.CoreService.GetStorage', + name='ApplyFeatureSet', + full_name='feast.core.CoreService.ApplyFeatureSet', index=4, containing_service=None, - input_type=_CORESERVICETYPES_GETSTORAGEREQUEST, - output_type=_CORESERVICETYPES_GETSTORAGERESPONSE, + input_type=_APPLYFEATURESETREQUEST, + output_type=_APPLYFEATURESETRESPONSE, serialized_options=None, ), _descriptor.MethodDescriptor( - 
name='ListStorage', - full_name='feast.core.CoreService.ListStorage', + name='UpdateStore', + full_name='feast.core.CoreService.UpdateStore', index=5, containing_service=None, - input_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - output_type=_CORESERVICETYPES_LISTSTORAGERESPONSE, - serialized_options=None, - ), - _descriptor.MethodDescriptor( - name='ApplyFeature', - full_name='feast.core.CoreService.ApplyFeature', - index=6, - containing_service=None, - input_type=feast_dot_specs_dot_FeatureSpec__pb2._FEATURESPEC, - output_type=_CORESERVICETYPES_APPLYFEATURERESPONSE, - serialized_options=None, - ), - _descriptor.MethodDescriptor( - name='ApplyFeatureGroup', - full_name='feast.core.CoreService.ApplyFeatureGroup', - index=7, - containing_service=None, - input_type=feast_dot_specs_dot_FeatureGroupSpec__pb2._FEATUREGROUPSPEC, - output_type=_CORESERVICETYPES_APPLYFEATUREGROUPRESPONSE, - serialized_options=None, - ), - _descriptor.MethodDescriptor( - name='ApplyEntity', - full_name='feast.core.CoreService.ApplyEntity', - index=8, - containing_service=None, - input_type=feast_dot_specs_dot_EntitySpec__pb2._ENTITYSPEC, - output_type=_CORESERVICETYPES_APPLYENTITYRESPONSE, - serialized_options=None, - ), - _descriptor.MethodDescriptor( - name='ApplyStorage', - full_name='feast.core.CoreService.ApplyStorage', - index=9, - containing_service=None, - input_type=feast_dot_specs_dot_StorageSpec__pb2._STORAGESPEC, - output_type=_CORESERVICETYPES_APPLYSTORAGERESPONSE, + input_type=_UPDATESTOREREQUEST, + output_type=_UPDATESTORERESPONSE, serialized_options=None, ), ]) diff --git a/sdk/python/feast/core/CoreService_pb2.pyi b/sdk/python/feast/core/CoreService_pb2.pyi new file mode 100644 index 00000000000..0f5d4b288e5 --- /dev/null +++ b/sdk/python/feast/core/CoreService_pb2.pyi @@ -0,0 +1,343 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from feast.core.FeatureSet_pb2 import ( + FeatureSetSpec as feast___core___FeatureSet_pb2___FeatureSetSpec, +) + +from feast.core.Store_pb2 import ( + Store as feast___core___Store_pb2___Store, +) + +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, + EnumDescriptor as google___protobuf___descriptor___EnumDescriptor, +) + +from google.protobuf.internal.containers import ( + RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from typing import ( + Iterable as typing___Iterable, + List as typing___List, + Optional as typing___Optional, + Text as typing___Text, + Tuple as typing___Tuple, + cast as typing___cast, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class GetFeatureSetRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + name = ... # type: typing___Text + version = ... # type: typing___Text + + def __init__(self, + *, + name : typing___Optional[typing___Text] = None, + version : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> GetFeatureSetRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"name",u"version"]) -> None: ... 
+ else: + def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name",u"version",b"version"]) -> None: ... + +class GetFeatureSetResponse(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + @property + def feature_set(self) -> feast___core___FeatureSet_pb2___FeatureSetSpec: ... + + def __init__(self, + *, + feature_set : typing___Optional[feast___core___FeatureSet_pb2___FeatureSetSpec] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> GetFeatureSetResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"feature_set"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"feature_set"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"feature_set",b"feature_set"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"feature_set",b"feature_set"]) -> None: ... + +class ListFeatureSetsRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + class Filter(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + feature_set_name = ... # type: typing___Text + feature_set_version = ... # type: typing___Text + + def __init__(self, + *, + feature_set_name : typing___Optional[typing___Text] = None, + feature_set_version : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> ListFeatureSetsRequest.Filter: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"feature_set_name",u"feature_set_version"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"feature_set_name",b"feature_set_name",u"feature_set_version",b"feature_set_version"]) -> None: ... + + + @property + def filter(self) -> ListFeatureSetsRequest.Filter: ... + + def __init__(self, + *, + filter : typing___Optional[ListFeatureSetsRequest.Filter] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> ListFeatureSetsRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"filter"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"filter"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"filter",b"filter"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"filter",b"filter"]) -> None: ... + +class ListFeatureSetsResponse(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + @property + def feature_sets(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___core___FeatureSet_pb2___FeatureSetSpec]: ... 
+ + def __init__(self, + *, + feature_sets : typing___Optional[typing___Iterable[feast___core___FeatureSet_pb2___FeatureSetSpec]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> ListFeatureSetsResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"feature_sets"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"feature_sets",b"feature_sets"]) -> None: ... + +class ListStoresRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + class Filter(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + name = ... # type: typing___Text + + def __init__(self, + *, + name : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> ListStoresRequest.Filter: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"name"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name"]) -> None: ... + + + @property + def filter(self) -> ListStoresRequest.Filter: ... + + def __init__(self, + *, + filter : typing___Optional[ListStoresRequest.Filter] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> ListStoresRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"filter"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"filter"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"filter",b"filter"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"filter",b"filter"]) -> None: ... + +class ListStoresResponse(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + @property + def store(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___core___Store_pb2___Store]: ... + + def __init__(self, + *, + store : typing___Optional[typing___Iterable[feast___core___Store_pb2___Store]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> ListStoresResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"store"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"store",b"store"]) -> None: ... + +class ApplyFeatureSetRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + @property + def feature_set(self) -> feast___core___FeatureSet_pb2___FeatureSetSpec: ... 
+ + def __init__(self, + *, + feature_set : typing___Optional[feast___core___FeatureSet_pb2___FeatureSetSpec] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> ApplyFeatureSetRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"feature_set"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"feature_set"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"feature_set",b"feature_set"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"feature_set",b"feature_set"]) -> None: ... + +class ApplyFeatureSetResponse(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + class Status(int): + DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... + @classmethod + def Name(cls, number: int) -> str: ... + @classmethod + def Value(cls, name: str) -> ApplyFeatureSetResponse.Status: ... + @classmethod + def keys(cls) -> typing___List[str]: ... + @classmethod + def values(cls) -> typing___List[ApplyFeatureSetResponse.Status]: ... + @classmethod + def items(cls) -> typing___List[typing___Tuple[str, ApplyFeatureSetResponse.Status]]: ... + NO_CHANGE = typing___cast(ApplyFeatureSetResponse.Status, 0) + CREATED = typing___cast(ApplyFeatureSetResponse.Status, 1) + ERROR = typing___cast(ApplyFeatureSetResponse.Status, 2) + NO_CHANGE = typing___cast(ApplyFeatureSetResponse.Status, 0) + CREATED = typing___cast(ApplyFeatureSetResponse.Status, 1) + ERROR = typing___cast(ApplyFeatureSetResponse.Status, 2) + + status = ... # type: ApplyFeatureSetResponse.Status + + @property + def feature_set(self) -> feast___core___FeatureSet_pb2___FeatureSetSpec: ... + + def __init__(self, + *, + feature_set : typing___Optional[feast___core___FeatureSet_pb2___FeatureSetSpec] = None, + status : typing___Optional[ApplyFeatureSetResponse.Status] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> ApplyFeatureSetResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"feature_set"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"feature_set",u"status"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"feature_set",b"feature_set"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"feature_set",b"feature_set",u"status",b"status"]) -> None: ... + +class GetFeastCoreVersionRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + def __init__(self, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> GetFeastCoreVersionRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + +class GetFeastCoreVersionResponse(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + version = ... 
# type: typing___Text + + def __init__(self, + *, + version : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> GetFeastCoreVersionResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"version"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"version",b"version"]) -> None: ... + +class UpdateStoreRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + @property + def store(self) -> feast___core___Store_pb2___Store: ... + + def __init__(self, + *, + store : typing___Optional[feast___core___Store_pb2___Store] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> UpdateStoreRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"store"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"store"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"store",b"store"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"store",b"store"]) -> None: ... + +class UpdateStoreResponse(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + class Status(int): + DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... + @classmethod + def Name(cls, number: int) -> str: ... + @classmethod + def Value(cls, name: str) -> UpdateStoreResponse.Status: ... + @classmethod + def keys(cls) -> typing___List[str]: ... + @classmethod + def values(cls) -> typing___List[UpdateStoreResponse.Status]: ... + @classmethod + def items(cls) -> typing___List[typing___Tuple[str, UpdateStoreResponse.Status]]: ... + NO_CHANGE = typing___cast(UpdateStoreResponse.Status, 0) + UPDATED = typing___cast(UpdateStoreResponse.Status, 1) + NO_CHANGE = typing___cast(UpdateStoreResponse.Status, 0) + UPDATED = typing___cast(UpdateStoreResponse.Status, 1) + + status = ... # type: UpdateStoreResponse.Status + + @property + def store(self) -> feast___core___Store_pb2___Store: ... + + def __init__(self, + *, + store : typing___Optional[feast___core___Store_pb2___Store] = None, + status : typing___Optional[UpdateStoreResponse.Status] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> UpdateStoreResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"store"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"status",u"store"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"store",b"store"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"status",b"status",u"store",b"store"]) -> None: ... 
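
The stubs above describe the new top-level Core API messages (GetFeatureSetRequest, ApplyFeatureSetResponse, UpdateStoreRequest, and so on) that replace the previous CoreServiceTypes-nested messages. As a minimal sketch of how the regenerated client modules might be exercised, the snippet below assumes a Feast Core gRPC endpoint at localhost:6565 and uses a placeholder feature set name and entity; it is illustrative only and not part of the generated diff.

import grpc

from feast.core import CoreService_pb2 as core_pb2
from feast.core import CoreService_pb2_grpc as core_grpc
from feast.core import FeatureSet_pb2 as feature_set_pb2

# Assumed Core endpoint; adjust for your deployment.
channel = grpc.insecure_channel("localhost:6565")
stub = core_grpc.CoreServiceStub(channel)

# Report the version of the connected Core deployment.
version_response = stub.GetFeastCoreVersion(core_pb2.GetFeastCoreVersionRequest())
print(version_response.version)

# Register (or update) a placeholder feature set; Core advances the version
# number when the schema changes.
spec = feature_set_pb2.FeatureSetSpec(
    name="customer_transactions",  # placeholder name for illustration
    entities=[feature_set_pb2.EntitySpec(name="customer_id")],
)
apply_response = stub.ApplyFeatureSet(core_pb2.ApplyFeatureSetRequest(feature_set=spec))
print(core_pb2.ApplyFeatureSetResponse.Status.Name(apply_response.status))
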
diff --git a/sdk/python/feast/core/CoreService_pb2_grpc.py b/sdk/python/feast/core/CoreService_pb2_grpc.py index 6331551fda6..c4d28087791 100644 --- a/sdk/python/feast/core/CoreService_pb2_grpc.py +++ b/sdk/python/feast/core/CoreService_pb2_grpc.py @@ -2,11 +2,6 @@ import grpc from feast.core import CoreService_pb2 as feast_dot_core_dot_CoreService__pb2 -from feast.specs import EntitySpec_pb2 as feast_dot_specs_dot_EntitySpec__pb2 -from feast.specs import FeatureGroupSpec_pb2 as feast_dot_specs_dot_FeatureGroupSpec__pb2 -from feast.specs import FeatureSpec_pb2 as feast_dot_specs_dot_FeatureSpec__pb2 -from feast.specs import StorageSpec_pb2 as feast_dot_specs_dot_StorageSpec__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 class CoreServiceStub(object): @@ -19,55 +14,35 @@ def __init__(self, channel): Args: channel: A grpc.Channel. """ - self.GetEntities = channel.unary_unary( - '/feast.core.CoreService/GetEntities', - request_serializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.GetEntitiesRequest.SerializeToString, - response_deserializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.GetEntitiesResponse.FromString, + self.GetFeastCoreVersion = channel.unary_unary( + '/feast.core.CoreService/GetFeastCoreVersion', + request_serializer=feast_dot_core_dot_CoreService__pb2.GetFeastCoreVersionRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_CoreService__pb2.GetFeastCoreVersionResponse.FromString, ) - self.ListEntities = channel.unary_unary( - '/feast.core.CoreService/ListEntities', - request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - response_deserializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.ListEntitiesResponse.FromString, + self.GetFeatureSet = channel.unary_unary( + '/feast.core.CoreService/GetFeatureSet', + request_serializer=feast_dot_core_dot_CoreService__pb2.GetFeatureSetRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_CoreService__pb2.GetFeatureSetResponse.FromString, ) - self.GetFeatures = channel.unary_unary( - '/feast.core.CoreService/GetFeatures', - request_serializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.GetFeaturesRequest.SerializeToString, - response_deserializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.GetFeaturesResponse.FromString, + self.ListFeatureSets = channel.unary_unary( + '/feast.core.CoreService/ListFeatureSets', + request_serializer=feast_dot_core_dot_CoreService__pb2.ListFeatureSetsRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_CoreService__pb2.ListFeatureSetsResponse.FromString, ) - self.ListFeatures = channel.unary_unary( - '/feast.core.CoreService/ListFeatures', - request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - response_deserializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.ListFeaturesResponse.FromString, + self.ListStores = channel.unary_unary( + '/feast.core.CoreService/ListStores', + request_serializer=feast_dot_core_dot_CoreService__pb2.ListStoresRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_CoreService__pb2.ListStoresResponse.FromString, ) - self.GetStorage = channel.unary_unary( - '/feast.core.CoreService/GetStorage', - request_serializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.GetStorageRequest.SerializeToString, - response_deserializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.GetStorageResponse.FromString, + self.ApplyFeatureSet = channel.unary_unary( + 
'/feast.core.CoreService/ApplyFeatureSet', + request_serializer=feast_dot_core_dot_CoreService__pb2.ApplyFeatureSetRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_CoreService__pb2.ApplyFeatureSetResponse.FromString, ) - self.ListStorage = channel.unary_unary( - '/feast.core.CoreService/ListStorage', - request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - response_deserializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.ListStorageResponse.FromString, - ) - self.ApplyFeature = channel.unary_unary( - '/feast.core.CoreService/ApplyFeature', - request_serializer=feast_dot_specs_dot_FeatureSpec__pb2.FeatureSpec.SerializeToString, - response_deserializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.ApplyFeatureResponse.FromString, - ) - self.ApplyFeatureGroup = channel.unary_unary( - '/feast.core.CoreService/ApplyFeatureGroup', - request_serializer=feast_dot_specs_dot_FeatureGroupSpec__pb2.FeatureGroupSpec.SerializeToString, - response_deserializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.ApplyFeatureGroupResponse.FromString, - ) - self.ApplyEntity = channel.unary_unary( - '/feast.core.CoreService/ApplyEntity', - request_serializer=feast_dot_specs_dot_EntitySpec__pb2.EntitySpec.SerializeToString, - response_deserializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.ApplyEntityResponse.FromString, - ) - self.ApplyStorage = channel.unary_unary( - '/feast.core.CoreService/ApplyStorage', - request_serializer=feast_dot_specs_dot_StorageSpec__pb2.StorageSpec.SerializeToString, - response_deserializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.ApplyStorageResponse.FromString, + self.UpdateStore = channel.unary_unary( + '/feast.core.CoreService/UpdateStore', + request_serializer=feast_dot_core_dot_CoreService__pb2.UpdateStoreRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_CoreService__pb2.UpdateStoreResponse.FromString, ) @@ -75,91 +50,59 @@ class CoreServiceServicer(object): # missing associated documentation comment in .proto file pass - def GetEntities(self, request, context): - """ - Get entities specified in request. - This process returns a list of entity specs. + def GetFeastCoreVersion(self, request, context): + """Retrieve version information about this Feast deployment """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') - def ListEntities(self, request, context): - """ - Get all entities - This process returns a list of entity specs. + def GetFeatureSet(self, request, context): + """Returns a specific feature set """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') - def GetFeatures(self, request, context): - """ - Get features specified in request. - This process returns a list of feature specs. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + def ListFeatureSets(self, request, context): + """Retrieve feature set details given a filter. - def ListFeatures(self, request, context): - """ - Get all features. - This process returns a list of entity specs. + Returns all feature sets matching that filter. If none are found, + an empty list will be returned. 
+ If no filter is provided in the request, the response will contain all the feature + sets currently stored in the registry. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') - def GetStorage(self, request, context): - """ - Get storage specs specified in request. - This process returns a list of storage specs. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + def ListStores(self, request, context): + """Retrieve store details given a filter. - def ListStorage(self, request, context): - """ - Get all storage specs. - This process returns a list of storage specs. + Returns all stores matching that filter. If none are found, an empty list will be returned. + If no filter is provided in the request, the response will contain all the stores currently + stored in the registry. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') - def ApplyFeature(self, request, context): - """ - Register a new feature to the metadata store, or update an existing feature. - If any validation errors occur, only the first encountered error will be returned. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + def ApplyFeatureSet(self, request, context): + """Create or update and existing feature set. - def ApplyFeatureGroup(self, request, context): - """ - Register a new feature group to the metadata store, or update an existing feature group. - If any validation errors occur, only the first encountered error will be returned. + This function is idempotent - it will not create a new feature set if schema does not change. + If an existing feature set is updated, core will advance the version number, which will be + returned in response. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') - def ApplyEntity(self, request, context): - """ - Register a new entity to the metadata store, or update an existing entity. - If any validation errors occur, only the first encountered error will be returned. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + def UpdateStore(self, request, context): + """Updates core with the configuration of the store. - def ApplyStorage(self, request, context): - """ - Register a new storage spec to the metadata store, or update an existing storage. - If any validation errors occur, only the first encountered error will be returned. + If the changes are valid, core will return the given store configuration in response, and + start or update the necessary feature population jobs for the updated store. 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') @@ -168,55 +111,35 @@ def ApplyStorage(self, request, context): def add_CoreServiceServicer_to_server(servicer, server): rpc_method_handlers = { - 'GetEntities': grpc.unary_unary_rpc_method_handler( - servicer.GetEntities, - request_deserializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.GetEntitiesRequest.FromString, - response_serializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.GetEntitiesResponse.SerializeToString, - ), - 'ListEntities': grpc.unary_unary_rpc_method_handler( - servicer.ListEntities, - request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - response_serializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.ListEntitiesResponse.SerializeToString, - ), - 'GetFeatures': grpc.unary_unary_rpc_method_handler( - servicer.GetFeatures, - request_deserializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.GetFeaturesRequest.FromString, - response_serializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.GetFeaturesResponse.SerializeToString, - ), - 'ListFeatures': grpc.unary_unary_rpc_method_handler( - servicer.ListFeatures, - request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - response_serializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.ListFeaturesResponse.SerializeToString, - ), - 'GetStorage': grpc.unary_unary_rpc_method_handler( - servicer.GetStorage, - request_deserializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.GetStorageRequest.FromString, - response_serializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.GetStorageResponse.SerializeToString, + 'GetFeastCoreVersion': grpc.unary_unary_rpc_method_handler( + servicer.GetFeastCoreVersion, + request_deserializer=feast_dot_core_dot_CoreService__pb2.GetFeastCoreVersionRequest.FromString, + response_serializer=feast_dot_core_dot_CoreService__pb2.GetFeastCoreVersionResponse.SerializeToString, ), - 'ListStorage': grpc.unary_unary_rpc_method_handler( - servicer.ListStorage, - request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - response_serializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.ListStorageResponse.SerializeToString, + 'GetFeatureSet': grpc.unary_unary_rpc_method_handler( + servicer.GetFeatureSet, + request_deserializer=feast_dot_core_dot_CoreService__pb2.GetFeatureSetRequest.FromString, + response_serializer=feast_dot_core_dot_CoreService__pb2.GetFeatureSetResponse.SerializeToString, ), - 'ApplyFeature': grpc.unary_unary_rpc_method_handler( - servicer.ApplyFeature, - request_deserializer=feast_dot_specs_dot_FeatureSpec__pb2.FeatureSpec.FromString, - response_serializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.ApplyFeatureResponse.SerializeToString, + 'ListFeatureSets': grpc.unary_unary_rpc_method_handler( + servicer.ListFeatureSets, + request_deserializer=feast_dot_core_dot_CoreService__pb2.ListFeatureSetsRequest.FromString, + response_serializer=feast_dot_core_dot_CoreService__pb2.ListFeatureSetsResponse.SerializeToString, ), - 'ApplyFeatureGroup': grpc.unary_unary_rpc_method_handler( - servicer.ApplyFeatureGroup, - request_deserializer=feast_dot_specs_dot_FeatureGroupSpec__pb2.FeatureGroupSpec.FromString, - response_serializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.ApplyFeatureGroupResponse.SerializeToString, + 'ListStores': grpc.unary_unary_rpc_method_handler( + servicer.ListStores, + 
request_deserializer=feast_dot_core_dot_CoreService__pb2.ListStoresRequest.FromString, + response_serializer=feast_dot_core_dot_CoreService__pb2.ListStoresResponse.SerializeToString, ), - 'ApplyEntity': grpc.unary_unary_rpc_method_handler( - servicer.ApplyEntity, - request_deserializer=feast_dot_specs_dot_EntitySpec__pb2.EntitySpec.FromString, - response_serializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.ApplyEntityResponse.SerializeToString, + 'ApplyFeatureSet': grpc.unary_unary_rpc_method_handler( + servicer.ApplyFeatureSet, + request_deserializer=feast_dot_core_dot_CoreService__pb2.ApplyFeatureSetRequest.FromString, + response_serializer=feast_dot_core_dot_CoreService__pb2.ApplyFeatureSetResponse.SerializeToString, ), - 'ApplyStorage': grpc.unary_unary_rpc_method_handler( - servicer.ApplyStorage, - request_deserializer=feast_dot_specs_dot_StorageSpec__pb2.StorageSpec.FromString, - response_serializer=feast_dot_core_dot_CoreService__pb2.CoreServiceTypes.ApplyStorageResponse.SerializeToString, + 'UpdateStore': grpc.unary_unary_rpc_method_handler( + servicer.UpdateStore, + request_deserializer=feast_dot_core_dot_CoreService__pb2.UpdateStoreRequest.FromString, + response_serializer=feast_dot_core_dot_CoreService__pb2.UpdateStoreResponse.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( diff --git a/sdk/python/feast/core/DatasetService_pb2.py b/sdk/python/feast/core/DatasetService_pb2.py deleted file mode 100644 index 0c94041a4b1..00000000000 --- a/sdk/python/feast/core/DatasetService_pb2.py +++ /dev/null @@ -1,346 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: feast/core/DatasetService.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='feast/core/DatasetService.proto', - package='feast.core', - syntax='proto3', - serialized_options=_b('\n\nfeast.coreB\023DatasetServiceProtoZ5github.com/gojek/feast/protos/generated/go/feast/core'), - serialized_pb=_b('\n\x1f\x66\x65\x61st/core/DatasetService.proto\x12\nfeast.core\x1a\x1fgoogle/protobuf/timestamp.proto\"\xa4\x03\n\x13\x44\x61tasetServiceTypes\x1a\xc5\x02\n\x14\x43reateDatasetRequest\x12*\n\nfeatureSet\x18\x01 \x01(\x0b\x32\x16.feast.core.FeatureSet\x12-\n\tstartDate\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x07\x65ndDate\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05limit\x18\x04 \x01(\x03\x12\x12\n\nnamePrefix\x18\x05 \x01(\t\x12R\n\x07\x66ilters\x18\x06 \x03(\x0b\x32\x41.feast.core.DatasetServiceTypes.CreateDatasetRequest.FiltersEntry\x1a.\n\x0c\x46iltersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x45\n\x15\x43reateDatasetResponse\x12,\n\x0b\x64\x61tasetInfo\x18\x01 \x01(\x0b\x32\x17.feast.core.DatasetInfo\"4\n\nFeatureSet\x12\x12\n\nentityName\x18\x01 \x01(\t\x12\x12\n\nfeatureIds\x18\x02 \x03(\t\"-\n\x0b\x44\x61tasetInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x10\n\x08tableUrl\x18\x02 
\x01(\t2\x90\x01\n\x0e\x44\x61tasetService\x12~\n\rCreateDataset\x12\x34.feast.core.DatasetServiceTypes.CreateDatasetRequest\x1a\x35.feast.core.DatasetServiceTypes.CreateDatasetResponse\"\x00\x42X\n\nfeast.coreB\x13\x44\x61tasetServiceProtoZ5github.com/gojek/feast/protos/generated/go/feast/coreb\x06proto3') - , - dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) - - - - -_DATASETSERVICETYPES_CREATEDATASETREQUEST_FILTERSENTRY = _descriptor.Descriptor( - name='FiltersEntry', - full_name='feast.core.DatasetServiceTypes.CreateDatasetRequest.FiltersEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='feast.core.DatasetServiceTypes.CreateDatasetRequest.FiltersEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='feast.core.DatasetServiceTypes.CreateDatasetRequest.FiltersEntry.value', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=_b('8\001'), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=384, - serialized_end=430, -) - -_DATASETSERVICETYPES_CREATEDATASETREQUEST = _descriptor.Descriptor( - name='CreateDatasetRequest', - full_name='feast.core.DatasetServiceTypes.CreateDatasetRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='featureSet', full_name='feast.core.DatasetServiceTypes.CreateDatasetRequest.featureSet', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='startDate', full_name='feast.core.DatasetServiceTypes.CreateDatasetRequest.startDate', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='endDate', full_name='feast.core.DatasetServiceTypes.CreateDatasetRequest.endDate', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='limit', full_name='feast.core.DatasetServiceTypes.CreateDatasetRequest.limit', index=3, - number=4, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='namePrefix', full_name='feast.core.DatasetServiceTypes.CreateDatasetRequest.namePrefix', index=4, - number=5, type=9, cpp_type=9, label=1, - 
has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='filters', full_name='feast.core.DatasetServiceTypes.CreateDatasetRequest.filters', index=5, - number=6, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_DATASETSERVICETYPES_CREATEDATASETREQUEST_FILTERSENTRY, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=105, - serialized_end=430, -) - -_DATASETSERVICETYPES_CREATEDATASETRESPONSE = _descriptor.Descriptor( - name='CreateDatasetResponse', - full_name='feast.core.DatasetServiceTypes.CreateDatasetResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='datasetInfo', full_name='feast.core.DatasetServiceTypes.CreateDatasetResponse.datasetInfo', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=432, - serialized_end=501, -) - -_DATASETSERVICETYPES = _descriptor.Descriptor( - name='DatasetServiceTypes', - full_name='feast.core.DatasetServiceTypes', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - ], - extensions=[ - ], - nested_types=[_DATASETSERVICETYPES_CREATEDATASETREQUEST, _DATASETSERVICETYPES_CREATEDATASETRESPONSE, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=81, - serialized_end=501, -) - - -_FEATURESET = _descriptor.Descriptor( - name='FeatureSet', - full_name='feast.core.FeatureSet', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='entityName', full_name='feast.core.FeatureSet.entityName', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='featureIds', full_name='feast.core.FeatureSet.featureIds', index=1, - number=2, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=503, - serialized_end=555, -) - - -_DATASETINFO = _descriptor.Descriptor( - name='DatasetInfo', - full_name='feast.core.DatasetInfo', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', 
full_name='feast.core.DatasetInfo.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='tableUrl', full_name='feast.core.DatasetInfo.tableUrl', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=557, - serialized_end=602, -) - -_DATASETSERVICETYPES_CREATEDATASETREQUEST_FILTERSENTRY.containing_type = _DATASETSERVICETYPES_CREATEDATASETREQUEST -_DATASETSERVICETYPES_CREATEDATASETREQUEST.fields_by_name['featureSet'].message_type = _FEATURESET -_DATASETSERVICETYPES_CREATEDATASETREQUEST.fields_by_name['startDate'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_DATASETSERVICETYPES_CREATEDATASETREQUEST.fields_by_name['endDate'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_DATASETSERVICETYPES_CREATEDATASETREQUEST.fields_by_name['filters'].message_type = _DATASETSERVICETYPES_CREATEDATASETREQUEST_FILTERSENTRY -_DATASETSERVICETYPES_CREATEDATASETREQUEST.containing_type = _DATASETSERVICETYPES -_DATASETSERVICETYPES_CREATEDATASETRESPONSE.fields_by_name['datasetInfo'].message_type = _DATASETINFO -_DATASETSERVICETYPES_CREATEDATASETRESPONSE.containing_type = _DATASETSERVICETYPES -DESCRIPTOR.message_types_by_name['DatasetServiceTypes'] = _DATASETSERVICETYPES -DESCRIPTOR.message_types_by_name['FeatureSet'] = _FEATURESET -DESCRIPTOR.message_types_by_name['DatasetInfo'] = _DATASETINFO -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -DatasetServiceTypes = _reflection.GeneratedProtocolMessageType('DatasetServiceTypes', (_message.Message,), dict( - - CreateDatasetRequest = _reflection.GeneratedProtocolMessageType('CreateDatasetRequest', (_message.Message,), dict( - - FiltersEntry = _reflection.GeneratedProtocolMessageType('FiltersEntry', (_message.Message,), dict( - DESCRIPTOR = _DATASETSERVICETYPES_CREATEDATASETREQUEST_FILTERSENTRY, - __module__ = 'feast.core.DatasetService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.DatasetServiceTypes.CreateDatasetRequest.FiltersEntry) - )) - , - DESCRIPTOR = _DATASETSERVICETYPES_CREATEDATASETREQUEST, - __module__ = 'feast.core.DatasetService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.DatasetServiceTypes.CreateDatasetRequest) - )) - , - - CreateDatasetResponse = _reflection.GeneratedProtocolMessageType('CreateDatasetResponse', (_message.Message,), dict( - DESCRIPTOR = _DATASETSERVICETYPES_CREATEDATASETRESPONSE, - __module__ = 'feast.core.DatasetService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.DatasetServiceTypes.CreateDatasetResponse) - )) - , - DESCRIPTOR = _DATASETSERVICETYPES, - __module__ = 'feast.core.DatasetService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.DatasetServiceTypes) - )) -_sym_db.RegisterMessage(DatasetServiceTypes) -_sym_db.RegisterMessage(DatasetServiceTypes.CreateDatasetRequest) -_sym_db.RegisterMessage(DatasetServiceTypes.CreateDatasetRequest.FiltersEntry) 
-_sym_db.RegisterMessage(DatasetServiceTypes.CreateDatasetResponse) - -FeatureSet = _reflection.GeneratedProtocolMessageType('FeatureSet', (_message.Message,), dict( - DESCRIPTOR = _FEATURESET, - __module__ = 'feast.core.DatasetService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.FeatureSet) - )) -_sym_db.RegisterMessage(FeatureSet) - -DatasetInfo = _reflection.GeneratedProtocolMessageType('DatasetInfo', (_message.Message,), dict( - DESCRIPTOR = _DATASETINFO, - __module__ = 'feast.core.DatasetService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.DatasetInfo) - )) -_sym_db.RegisterMessage(DatasetInfo) - - -DESCRIPTOR._options = None -_DATASETSERVICETYPES_CREATEDATASETREQUEST_FILTERSENTRY._options = None - -_DATASETSERVICE = _descriptor.ServiceDescriptor( - name='DatasetService', - full_name='feast.core.DatasetService', - file=DESCRIPTOR, - index=0, - serialized_options=None, - serialized_start=605, - serialized_end=749, - methods=[ - _descriptor.MethodDescriptor( - name='CreateDataset', - full_name='feast.core.DatasetService.CreateDataset', - index=0, - containing_service=None, - input_type=_DATASETSERVICETYPES_CREATEDATASETREQUEST, - output_type=_DATASETSERVICETYPES_CREATEDATASETRESPONSE, - serialized_options=None, - ), -]) -_sym_db.RegisterServiceDescriptor(_DATASETSERVICE) - -DESCRIPTOR.services_by_name['DatasetService'] = _DATASETSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/core/DatasetService_pb2_grpc.py b/sdk/python/feast/core/DatasetService_pb2_grpc.py deleted file mode 100644 index f6b714f5651..00000000000 --- a/sdk/python/feast/core/DatasetService_pb2_grpc.py +++ /dev/null @@ -1,46 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from feast.core import DatasetService_pb2 as feast_dot_core_dot_DatasetService__pb2 - - -class DatasetServiceStub(object): - # missing associated documentation comment in .proto file - pass - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.CreateDataset = channel.unary_unary( - '/feast.core.DatasetService/CreateDataset', - request_serializer=feast_dot_core_dot_DatasetService__pb2.DatasetServiceTypes.CreateDatasetRequest.SerializeToString, - response_deserializer=feast_dot_core_dot_DatasetService__pb2.DatasetServiceTypes.CreateDatasetResponse.FromString, - ) - - -class DatasetServiceServicer(object): - # missing associated documentation comment in .proto file - pass - - def CreateDataset(self, request, context): - """Create training dataset for a feature set - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_DatasetServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - 'CreateDataset': grpc.unary_unary_rpc_method_handler( - servicer.CreateDataset, - request_deserializer=feast_dot_core_dot_DatasetService__pb2.DatasetServiceTypes.CreateDatasetRequest.FromString, - response_serializer=feast_dot_core_dot_DatasetService__pb2.DatasetServiceTypes.CreateDatasetResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'feast.core.DatasetService', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/sdk/python/feast/core/FeatureSet_pb2.py b/sdk/python/feast/core/FeatureSet_pb2.py new file mode 100644 index 00000000000..8c331db16b1 --- /dev/null +++ b/sdk/python/feast/core/FeatureSet_pb2.py @@ -0,0 +1,208 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/FeatureSet.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 +from feast.core import Source_pb2 as feast_dot_core_dot_Source__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='feast/core/FeatureSet.proto', + package='feast.core', + syntax='proto3', + serialized_options=_b('\n\nfeast.coreB\017FeatureSetProtoZ/github.com/gojek/feast/sdk/go/protos/feast/core'), + serialized_pb=_b('\n\x1b\x66\x65\x61st/core/FeatureSet.proto\x12\nfeast.core\x1a\x17\x66\x65\x61st/types/Value.proto\x1a\x17\x66\x65\x61st/core/Source.proto\x1a\x1egoogle/protobuf/duration.proto\"\xd4\x01\n\x0e\x46\x65\x61tureSetSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12(\n\x08\x65ntities\x18\x03 \x03(\x0b\x32\x16.feast.core.EntitySpec\x12)\n\x08\x66\x65\x61tures\x18\x04 \x03(\x0b\x32\x17.feast.core.FeatureSpec\x12*\n\x07max_age\x18\x05 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\"\n\x06source\x18\x06 \x01(\x0b\x32\x12.feast.core.Source\"K\n\nEntitySpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\nvalue_type\x18\x02 \x01(\x0e\x32\x1b.feast.types.ValueType.Enum\"L\n\x0b\x46\x65\x61tureSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\nvalue_type\x18\x02 \x01(\x0e\x32\x1b.feast.types.ValueType.EnumBN\n\nfeast.coreB\x0f\x46\x65\x61tureSetProtoZ/github.com/gojek/feast/sdk/go/protos/feast/coreb\x06proto3') + , + 
dependencies=[feast_dot_types_dot_Value__pb2.DESCRIPTOR,feast_dot_core_dot_Source__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,]) + + + + +_FEATURESETSPEC = _descriptor.Descriptor( + name='FeatureSetSpec', + full_name='feast.core.FeatureSetSpec', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='feast.core.FeatureSetSpec.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='version', full_name='feast.core.FeatureSetSpec.version', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='entities', full_name='feast.core.FeatureSetSpec.entities', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='features', full_name='feast.core.FeatureSetSpec.features', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_age', full_name='feast.core.FeatureSetSpec.max_age', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='source', full_name='feast.core.FeatureSetSpec.source', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=126, + serialized_end=338, +) + + +_ENTITYSPEC = _descriptor.Descriptor( + name='EntitySpec', + full_name='feast.core.EntitySpec', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='feast.core.EntitySpec.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value_type', full_name='feast.core.EntitySpec.value_type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + 
serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=340, + serialized_end=415, +) + + +_FEATURESPEC = _descriptor.Descriptor( + name='FeatureSpec', + full_name='feast.core.FeatureSpec', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='feast.core.FeatureSpec.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value_type', full_name='feast.core.FeatureSpec.value_type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=417, + serialized_end=493, +) + +_FEATURESETSPEC.fields_by_name['entities'].message_type = _ENTITYSPEC +_FEATURESETSPEC.fields_by_name['features'].message_type = _FEATURESPEC +_FEATURESETSPEC.fields_by_name['max_age'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_FEATURESETSPEC.fields_by_name['source'].message_type = feast_dot_core_dot_Source__pb2._SOURCE +_ENTITYSPEC.fields_by_name['value_type'].enum_type = feast_dot_types_dot_Value__pb2._VALUETYPE_ENUM +_FEATURESPEC.fields_by_name['value_type'].enum_type = feast_dot_types_dot_Value__pb2._VALUETYPE_ENUM +DESCRIPTOR.message_types_by_name['FeatureSetSpec'] = _FEATURESETSPEC +DESCRIPTOR.message_types_by_name['EntitySpec'] = _ENTITYSPEC +DESCRIPTOR.message_types_by_name['FeatureSpec'] = _FEATURESPEC +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +FeatureSetSpec = _reflection.GeneratedProtocolMessageType('FeatureSetSpec', (_message.Message,), { + 'DESCRIPTOR' : _FEATURESETSPEC, + '__module__' : 'feast.core.FeatureSet_pb2' + # @@protoc_insertion_point(class_scope:feast.core.FeatureSetSpec) + }) +_sym_db.RegisterMessage(FeatureSetSpec) + +EntitySpec = _reflection.GeneratedProtocolMessageType('EntitySpec', (_message.Message,), { + 'DESCRIPTOR' : _ENTITYSPEC, + '__module__' : 'feast.core.FeatureSet_pb2' + # @@protoc_insertion_point(class_scope:feast.core.EntitySpec) + }) +_sym_db.RegisterMessage(EntitySpec) + +FeatureSpec = _reflection.GeneratedProtocolMessageType('FeatureSpec', (_message.Message,), { + 'DESCRIPTOR' : _FEATURESPEC, + '__module__' : 'feast.core.FeatureSet_pb2' + # @@protoc_insertion_point(class_scope:feast.core.FeatureSpec) + }) +_sym_db.RegisterMessage(FeatureSpec) + + +DESCRIPTOR._options = None +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/core/FeatureSet_pb2.pyi b/sdk/python/feast/core/FeatureSet_pb2.pyi new file mode 100644 index 00000000000..5d93721fe16 --- /dev/null +++ b/sdk/python/feast/core/FeatureSet_pb2.pyi @@ -0,0 +1,111 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! 
+import sys +from feast.core.Source_pb2 import ( + Source as feast___core___Source_pb2___Source, +) + +from feast.types.Value_pb2 import ( + ValueType as feast___types___Value_pb2___ValueType, +) + +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + +from google.protobuf.duration_pb2 import ( + Duration as google___protobuf___duration_pb2___Duration, +) + +from google.protobuf.internal.containers import ( + RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from typing import ( + Iterable as typing___Iterable, + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class FeatureSetSpec(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + name = ... # type: typing___Text + version = ... # type: int + + @property + def entities(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[EntitySpec]: ... + + @property + def features(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[FeatureSpec]: ... + + @property + def max_age(self) -> google___protobuf___duration_pb2___Duration: ... + + @property + def source(self) -> feast___core___Source_pb2___Source: ... + + def __init__(self, + *, + name : typing___Optional[typing___Text] = None, + version : typing___Optional[int] = None, + entities : typing___Optional[typing___Iterable[EntitySpec]] = None, + features : typing___Optional[typing___Iterable[FeatureSpec]] = None, + max_age : typing___Optional[google___protobuf___duration_pb2___Duration] = None, + source : typing___Optional[feast___core___Source_pb2___Source] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> FeatureSetSpec: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"max_age",u"source"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"entities",u"features",u"max_age",u"name",u"source",u"version"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"max_age",b"max_age",u"source",b"source"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"entities",b"entities",u"features",b"features",u"max_age",b"max_age",u"name",b"name",u"source",b"source",u"version",b"version"]) -> None: ... + +class EntitySpec(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + name = ... # type: typing___Text + value_type = ... # type: feast___types___Value_pb2___ValueType.Enum + + def __init__(self, + *, + name : typing___Optional[typing___Text] = None, + value_type : typing___Optional[feast___types___Value_pb2___ValueType.Enum] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> EntitySpec: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"name",u"value_type"]) -> None: ... 
+ else: + def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name",u"value_type",b"value_type"]) -> None: ... + +class FeatureSpec(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + name = ... # type: typing___Text + value_type = ... # type: feast___types___Value_pb2___ValueType.Enum + + def __init__(self, + *, + name : typing___Optional[typing___Text] = None, + value_type : typing___Optional[feast___types___Value_pb2___ValueType.Enum] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> FeatureSpec: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"name",u"value_type"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name",u"value_type",b"value_type"]) -> None: ... diff --git a/sdk/python/feast/core/FeatureSet_pb2_grpc.py b/sdk/python/feast/core/FeatureSet_pb2_grpc.py new file mode 100644 index 00000000000..a89435267cb --- /dev/null +++ b/sdk/python/feast/core/FeatureSet_pb2_grpc.py @@ -0,0 +1,3 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + diff --git a/sdk/python/feast/core/JobService_pb2.py b/sdk/python/feast/core/JobService_pb2.py deleted file mode 100644 index fd208380484..00000000000 --- a/sdk/python/feast/core/JobService_pb2.py +++ /dev/null @@ -1,553 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: feast/core/JobService.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from feast.specs import ImportSpec_pb2 as feast_dot_specs_dot_ImportSpec__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='feast/core/JobService.proto', - package='feast.core', - syntax='proto3', - serialized_options=_b('\n\nfeast.coreB\017JobServiceProtoZ5github.com/gojek/feast/protos/generated/go/feast/core'), - serialized_pb=_b('\n\x1b\x66\x65\x61st/core/JobService.proto\x12\nfeast.core\x1a\x1c\x66\x65\x61st/specs/ImportSpec.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xc9\x05\n\x0fJobServiceTypes\x1aS\n\x16SubmitImportJobRequest\x12+\n\nimportSpec\x18\x01 \x01(\x0b\x32\x17.feast.specs.ImportSpec\x12\x0c\n\x04name\x18\x02 \x01(\t\x1a(\n\x17SubmitImportJobResponse\x12\r\n\x05jobId\x18\x01 \x01(\t\x1aG\n\x10ListJobsResponse\x12\x33\n\x04jobs\x18\x01 \x03(\x0b\x32%.feast.core.JobServiceTypes.JobDetail\x1a\x1b\n\rGetJobRequest\x12\n\n\x02id\x18\x01 \x01(\t\x1a\x44\n\x0eGetJobResponse\x12\x32\n\x03job\x18\x01 \x01(\x0b\x32%.feast.core.JobServiceTypes.JobDetail\x1a\x1d\n\x0f\x41\x62ortJobRequest\x12\n\n\x02id\x18\x01 \x01(\t\x1a\x1e\n\x10\x41\x62ortJobResponse\x12\n\n\x02id\x18\x01 \x01(\t\x1a\xcb\x02\n\tJobDetail\x12\n\n\x02id\x18\x01 \x01(\t\x12\r\n\x05\x65xtId\x18\x02 \x01(\t\x12\x0c\n\x04type\x18\x03 \x01(\t\x12\x0e\n\x06runner\x18\x04 
\x01(\t\x12\x0e\n\x06status\x18\x05 \x01(\t\x12\x10\n\x08\x65ntities\x18\x06 \x03(\t\x12\x10\n\x08\x66\x65\x61tures\x18\x07 \x03(\t\x12\x43\n\x07metrics\x18\x08 \x03(\x0b\x32\x32.feast.core.JobServiceTypes.JobDetail.MetricsEntry\x12/\n\x0blastUpdated\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x07\x63reated\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a.\n\x0cMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x32\x9c\x03\n\nJobService\x12t\n\tSubmitJob\x12\x32.feast.core.JobServiceTypes.SubmitImportJobRequest\x1a\x33.feast.core.JobServiceTypes.SubmitImportJobResponse\x12P\n\x08ListJobs\x12\x16.google.protobuf.Empty\x1a,.feast.core.JobServiceTypes.ListJobsResponse\x12_\n\x06GetJob\x12).feast.core.JobServiceTypes.GetJobRequest\x1a*.feast.core.JobServiceTypes.GetJobResponse\x12\x65\n\x08\x41\x62ortJob\x12+.feast.core.JobServiceTypes.AbortJobRequest\x1a,.feast.core.JobServiceTypes.AbortJobResponseBT\n\nfeast.coreB\x0fJobServiceProtoZ5github.com/gojek/feast/protos/generated/go/feast/coreb\x06proto3') - , - dependencies=[feast_dot_specs_dot_ImportSpec__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) - - - - -_JOBSERVICETYPES_SUBMITIMPORTJOBREQUEST = _descriptor.Descriptor( - name='SubmitImportJobRequest', - full_name='feast.core.JobServiceTypes.SubmitImportJobRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='importSpec', full_name='feast.core.JobServiceTypes.SubmitImportJobRequest.importSpec', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='name', full_name='feast.core.JobServiceTypes.SubmitImportJobRequest.name', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=155, - serialized_end=238, -) - -_JOBSERVICETYPES_SUBMITIMPORTJOBRESPONSE = _descriptor.Descriptor( - name='SubmitImportJobResponse', - full_name='feast.core.JobServiceTypes.SubmitImportJobResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='jobId', full_name='feast.core.JobServiceTypes.SubmitImportJobResponse.jobId', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=240, - serialized_end=280, -) - -_JOBSERVICETYPES_LISTJOBSRESPONSE = _descriptor.Descriptor( - name='ListJobsResponse', - full_name='feast.core.JobServiceTypes.ListJobsResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - 
name='jobs', full_name='feast.core.JobServiceTypes.ListJobsResponse.jobs', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=282, - serialized_end=353, -) - -_JOBSERVICETYPES_GETJOBREQUEST = _descriptor.Descriptor( - name='GetJobRequest', - full_name='feast.core.JobServiceTypes.GetJobRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='id', full_name='feast.core.JobServiceTypes.GetJobRequest.id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=355, - serialized_end=382, -) - -_JOBSERVICETYPES_GETJOBRESPONSE = _descriptor.Descriptor( - name='GetJobResponse', - full_name='feast.core.JobServiceTypes.GetJobResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='job', full_name='feast.core.JobServiceTypes.GetJobResponse.job', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=384, - serialized_end=452, -) - -_JOBSERVICETYPES_ABORTJOBREQUEST = _descriptor.Descriptor( - name='AbortJobRequest', - full_name='feast.core.JobServiceTypes.AbortJobRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='id', full_name='feast.core.JobServiceTypes.AbortJobRequest.id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=454, - serialized_end=483, -) - -_JOBSERVICETYPES_ABORTJOBRESPONSE = _descriptor.Descriptor( - name='AbortJobResponse', - full_name='feast.core.JobServiceTypes.AbortJobResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='id', full_name='feast.core.JobServiceTypes.AbortJobResponse.id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - 
nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=485, - serialized_end=515, -) - -_JOBSERVICETYPES_JOBDETAIL_METRICSENTRY = _descriptor.Descriptor( - name='MetricsEntry', - full_name='feast.core.JobServiceTypes.JobDetail.MetricsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='feast.core.JobServiceTypes.JobDetail.MetricsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='feast.core.JobServiceTypes.JobDetail.MetricsEntry.value', index=1, - number=2, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=_b('8\001'), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=803, - serialized_end=849, -) - -_JOBSERVICETYPES_JOBDETAIL = _descriptor.Descriptor( - name='JobDetail', - full_name='feast.core.JobServiceTypes.JobDetail', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='id', full_name='feast.core.JobServiceTypes.JobDetail.id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='extId', full_name='feast.core.JobServiceTypes.JobDetail.extId', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='type', full_name='feast.core.JobServiceTypes.JobDetail.type', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='runner', full_name='feast.core.JobServiceTypes.JobDetail.runner', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='status', full_name='feast.core.JobServiceTypes.JobDetail.status', index=4, - number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='entities', full_name='feast.core.JobServiceTypes.JobDetail.entities', index=5, - number=6, type=9, 
cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='features', full_name='feast.core.JobServiceTypes.JobDetail.features', index=6, - number=7, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='metrics', full_name='feast.core.JobServiceTypes.JobDetail.metrics', index=7, - number=8, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lastUpdated', full_name='feast.core.JobServiceTypes.JobDetail.lastUpdated', index=8, - number=9, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='created', full_name='feast.core.JobServiceTypes.JobDetail.created', index=9, - number=10, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_JOBSERVICETYPES_JOBDETAIL_METRICSENTRY, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=518, - serialized_end=849, -) - -_JOBSERVICETYPES = _descriptor.Descriptor( - name='JobServiceTypes', - full_name='feast.core.JobServiceTypes', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - ], - extensions=[ - ], - nested_types=[_JOBSERVICETYPES_SUBMITIMPORTJOBREQUEST, _JOBSERVICETYPES_SUBMITIMPORTJOBRESPONSE, _JOBSERVICETYPES_LISTJOBSRESPONSE, _JOBSERVICETYPES_GETJOBREQUEST, _JOBSERVICETYPES_GETJOBRESPONSE, _JOBSERVICETYPES_ABORTJOBREQUEST, _JOBSERVICETYPES_ABORTJOBRESPONSE, _JOBSERVICETYPES_JOBDETAIL, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=136, - serialized_end=849, -) - -_JOBSERVICETYPES_SUBMITIMPORTJOBREQUEST.fields_by_name['importSpec'].message_type = feast_dot_specs_dot_ImportSpec__pb2._IMPORTSPEC -_JOBSERVICETYPES_SUBMITIMPORTJOBREQUEST.containing_type = _JOBSERVICETYPES -_JOBSERVICETYPES_SUBMITIMPORTJOBRESPONSE.containing_type = _JOBSERVICETYPES -_JOBSERVICETYPES_LISTJOBSRESPONSE.fields_by_name['jobs'].message_type = _JOBSERVICETYPES_JOBDETAIL -_JOBSERVICETYPES_LISTJOBSRESPONSE.containing_type = _JOBSERVICETYPES -_JOBSERVICETYPES_GETJOBREQUEST.containing_type = _JOBSERVICETYPES -_JOBSERVICETYPES_GETJOBRESPONSE.fields_by_name['job'].message_type = _JOBSERVICETYPES_JOBDETAIL -_JOBSERVICETYPES_GETJOBRESPONSE.containing_type = _JOBSERVICETYPES -_JOBSERVICETYPES_ABORTJOBREQUEST.containing_type = _JOBSERVICETYPES -_JOBSERVICETYPES_ABORTJOBRESPONSE.containing_type = _JOBSERVICETYPES -_JOBSERVICETYPES_JOBDETAIL_METRICSENTRY.containing_type = _JOBSERVICETYPES_JOBDETAIL 
-_JOBSERVICETYPES_JOBDETAIL.fields_by_name['metrics'].message_type = _JOBSERVICETYPES_JOBDETAIL_METRICSENTRY -_JOBSERVICETYPES_JOBDETAIL.fields_by_name['lastUpdated'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_JOBSERVICETYPES_JOBDETAIL.fields_by_name['created'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_JOBSERVICETYPES_JOBDETAIL.containing_type = _JOBSERVICETYPES -DESCRIPTOR.message_types_by_name['JobServiceTypes'] = _JOBSERVICETYPES -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -JobServiceTypes = _reflection.GeneratedProtocolMessageType('JobServiceTypes', (_message.Message,), dict( - - SubmitImportJobRequest = _reflection.GeneratedProtocolMessageType('SubmitImportJobRequest', (_message.Message,), dict( - DESCRIPTOR = _JOBSERVICETYPES_SUBMITIMPORTJOBREQUEST, - __module__ = 'feast.core.JobService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.JobServiceTypes.SubmitImportJobRequest) - )) - , - - SubmitImportJobResponse = _reflection.GeneratedProtocolMessageType('SubmitImportJobResponse', (_message.Message,), dict( - DESCRIPTOR = _JOBSERVICETYPES_SUBMITIMPORTJOBRESPONSE, - __module__ = 'feast.core.JobService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.JobServiceTypes.SubmitImportJobResponse) - )) - , - - ListJobsResponse = _reflection.GeneratedProtocolMessageType('ListJobsResponse', (_message.Message,), dict( - DESCRIPTOR = _JOBSERVICETYPES_LISTJOBSRESPONSE, - __module__ = 'feast.core.JobService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.JobServiceTypes.ListJobsResponse) - )) - , - - GetJobRequest = _reflection.GeneratedProtocolMessageType('GetJobRequest', (_message.Message,), dict( - DESCRIPTOR = _JOBSERVICETYPES_GETJOBREQUEST, - __module__ = 'feast.core.JobService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.JobServiceTypes.GetJobRequest) - )) - , - - GetJobResponse = _reflection.GeneratedProtocolMessageType('GetJobResponse', (_message.Message,), dict( - DESCRIPTOR = _JOBSERVICETYPES_GETJOBRESPONSE, - __module__ = 'feast.core.JobService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.JobServiceTypes.GetJobResponse) - )) - , - - AbortJobRequest = _reflection.GeneratedProtocolMessageType('AbortJobRequest', (_message.Message,), dict( - DESCRIPTOR = _JOBSERVICETYPES_ABORTJOBREQUEST, - __module__ = 'feast.core.JobService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.JobServiceTypes.AbortJobRequest) - )) - , - - AbortJobResponse = _reflection.GeneratedProtocolMessageType('AbortJobResponse', (_message.Message,), dict( - DESCRIPTOR = _JOBSERVICETYPES_ABORTJOBRESPONSE, - __module__ = 'feast.core.JobService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.JobServiceTypes.AbortJobResponse) - )) - , - - JobDetail = _reflection.GeneratedProtocolMessageType('JobDetail', (_message.Message,), dict( - - MetricsEntry = _reflection.GeneratedProtocolMessageType('MetricsEntry', (_message.Message,), dict( - DESCRIPTOR = _JOBSERVICETYPES_JOBDETAIL_METRICSENTRY, - __module__ = 'feast.core.JobService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.JobServiceTypes.JobDetail.MetricsEntry) - )) - , - DESCRIPTOR = _JOBSERVICETYPES_JOBDETAIL, - __module__ = 'feast.core.JobService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.JobServiceTypes.JobDetail) - )) - , - DESCRIPTOR = _JOBSERVICETYPES, - __module__ = 'feast.core.JobService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.JobServiceTypes) - )) -_sym_db.RegisterMessage(JobServiceTypes) 
-_sym_db.RegisterMessage(JobServiceTypes.SubmitImportJobRequest) -_sym_db.RegisterMessage(JobServiceTypes.SubmitImportJobResponse) -_sym_db.RegisterMessage(JobServiceTypes.ListJobsResponse) -_sym_db.RegisterMessage(JobServiceTypes.GetJobRequest) -_sym_db.RegisterMessage(JobServiceTypes.GetJobResponse) -_sym_db.RegisterMessage(JobServiceTypes.AbortJobRequest) -_sym_db.RegisterMessage(JobServiceTypes.AbortJobResponse) -_sym_db.RegisterMessage(JobServiceTypes.JobDetail) -_sym_db.RegisterMessage(JobServiceTypes.JobDetail.MetricsEntry) - - -DESCRIPTOR._options = None -_JOBSERVICETYPES_JOBDETAIL_METRICSENTRY._options = None - -_JOBSERVICE = _descriptor.ServiceDescriptor( - name='JobService', - full_name='feast.core.JobService', - file=DESCRIPTOR, - index=0, - serialized_options=None, - serialized_start=852, - serialized_end=1264, - methods=[ - _descriptor.MethodDescriptor( - name='SubmitJob', - full_name='feast.core.JobService.SubmitJob', - index=0, - containing_service=None, - input_type=_JOBSERVICETYPES_SUBMITIMPORTJOBREQUEST, - output_type=_JOBSERVICETYPES_SUBMITIMPORTJOBRESPONSE, - serialized_options=None, - ), - _descriptor.MethodDescriptor( - name='ListJobs', - full_name='feast.core.JobService.ListJobs', - index=1, - containing_service=None, - input_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - output_type=_JOBSERVICETYPES_LISTJOBSRESPONSE, - serialized_options=None, - ), - _descriptor.MethodDescriptor( - name='GetJob', - full_name='feast.core.JobService.GetJob', - index=2, - containing_service=None, - input_type=_JOBSERVICETYPES_GETJOBREQUEST, - output_type=_JOBSERVICETYPES_GETJOBRESPONSE, - serialized_options=None, - ), - _descriptor.MethodDescriptor( - name='AbortJob', - full_name='feast.core.JobService.AbortJob', - index=3, - containing_service=None, - input_type=_JOBSERVICETYPES_ABORTJOBREQUEST, - output_type=_JOBSERVICETYPES_ABORTJOBRESPONSE, - serialized_options=None, - ), -]) -_sym_db.RegisterServiceDescriptor(_JOBSERVICE) - -DESCRIPTOR.services_by_name['JobService'] = _JOBSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/core/JobService_pb2_grpc.py b/sdk/python/feast/core/JobService_pb2_grpc.py deleted file mode 100644 index 4a1ebf66c03..00000000000 --- a/sdk/python/feast/core/JobService_pb2_grpc.py +++ /dev/null @@ -1,98 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from feast.core import JobService_pb2 as feast_dot_core_dot_JobService__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class JobServiceStub(object): - # missing associated documentation comment in .proto file - pass - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.SubmitJob = channel.unary_unary( - '/feast.core.JobService/SubmitJob', - request_serializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.SubmitImportJobRequest.SerializeToString, - response_deserializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.SubmitImportJobResponse.FromString, - ) - self.ListJobs = channel.unary_unary( - '/feast.core.JobService/ListJobs', - request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - response_deserializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.ListJobsResponse.FromString, - ) - self.GetJob = channel.unary_unary( - '/feast.core.JobService/GetJob', - request_serializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.GetJobRequest.SerializeToString, - response_deserializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.GetJobResponse.FromString, - ) - self.AbortJob = channel.unary_unary( - '/feast.core.JobService/AbortJob', - request_serializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.AbortJobRequest.SerializeToString, - response_deserializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.AbortJobResponse.FromString, - ) - - -class JobServiceServicer(object): - # missing associated documentation comment in .proto file - pass - - def SubmitJob(self, request, context): - """Submit a job to feast to run. Returns the job id. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def ListJobs(self, request, context): - """List all jobs submitted to feast. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetJob(self, request, context): - """Get Job with ID - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def AbortJob(self, request, context): - """Abort job with given ID - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_JobServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - 'SubmitJob': grpc.unary_unary_rpc_method_handler( - servicer.SubmitJob, - request_deserializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.SubmitImportJobRequest.FromString, - response_serializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.SubmitImportJobResponse.SerializeToString, - ), - 'ListJobs': grpc.unary_unary_rpc_method_handler( - servicer.ListJobs, - request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - response_serializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.ListJobsResponse.SerializeToString, - ), - 'GetJob': grpc.unary_unary_rpc_method_handler( - servicer.GetJob, - request_deserializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.GetJobRequest.FromString, - response_serializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.GetJobResponse.SerializeToString, - ), - 'AbortJob': grpc.unary_unary_rpc_method_handler( - servicer.AbortJob, - request_deserializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.AbortJobRequest.FromString, - response_serializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.AbortJobResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'feast.core.JobService', rpc_method_handlers) - 
server.add_generic_rpc_handlers((generic_handler,)) diff --git a/sdk/python/feast/core/Source_pb2.py b/sdk/python/feast/core/Source_pb2.py new file mode 100644 index 00000000000..e0d0dd64313 --- /dev/null +++ b/sdk/python/feast/core/Source_pb2.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/Source.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='feast/core/Source.proto', + package='feast.core', + syntax='proto3', + serialized_options=_b('\n\nfeast.coreB\013SourceProtoZ/github.com/gojek/feast/sdk/go/protos/feast/core'), + serialized_pb=_b('\n\x17\x66\x65\x61st/core/Source.proto\x12\nfeast.core\"}\n\x06Source\x12$\n\x04type\x18\x01 \x01(\x0e\x32\x16.feast.core.SourceType\x12<\n\x13kafka_source_config\x18\x02 \x01(\x0b\x32\x1d.feast.core.KafkaSourceConfigH\x00\x42\x0f\n\rsource_config\"=\n\x11KafkaSourceConfig\x12\x19\n\x11\x62ootstrap_servers\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t*$\n\nSourceType\x12\x0b\n\x07INVALID\x10\x00\x12\t\n\x05KAFKA\x10\x01\x42J\n\nfeast.coreB\x0bSourceProtoZ/github.com/gojek/feast/sdk/go/protos/feast/coreb\x06proto3') +) + +_SOURCETYPE = _descriptor.EnumDescriptor( + name='SourceType', + full_name='feast.core.SourceType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='INVALID', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='KAFKA', index=1, number=1, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=229, + serialized_end=265, +) +_sym_db.RegisterEnumDescriptor(_SOURCETYPE) + +SourceType = enum_type_wrapper.EnumTypeWrapper(_SOURCETYPE) +INVALID = 0 +KAFKA = 1 + + + +_SOURCE = _descriptor.Descriptor( + name='Source', + full_name='feast.core.Source', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type', full_name='feast.core.Source.type', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='kafka_source_config', full_name='feast.core.Source.kafka_source_config', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='source_config', full_name='feast.core.Source.source_config', + index=0, containing_type=None, fields=[]), + ], + serialized_start=39, + serialized_end=164, +) + + +_KAFKASOURCECONFIG = _descriptor.Descriptor( + name='KafkaSourceConfig', + 
full_name='feast.core.KafkaSourceConfig', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bootstrap_servers', full_name='feast.core.KafkaSourceConfig.bootstrap_servers', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='topic', full_name='feast.core.KafkaSourceConfig.topic', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=166, + serialized_end=227, +) + +_SOURCE.fields_by_name['type'].enum_type = _SOURCETYPE +_SOURCE.fields_by_name['kafka_source_config'].message_type = _KAFKASOURCECONFIG +_SOURCE.oneofs_by_name['source_config'].fields.append( + _SOURCE.fields_by_name['kafka_source_config']) +_SOURCE.fields_by_name['kafka_source_config'].containing_oneof = _SOURCE.oneofs_by_name['source_config'] +DESCRIPTOR.message_types_by_name['Source'] = _SOURCE +DESCRIPTOR.message_types_by_name['KafkaSourceConfig'] = _KAFKASOURCECONFIG +DESCRIPTOR.enum_types_by_name['SourceType'] = _SOURCETYPE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Source = _reflection.GeneratedProtocolMessageType('Source', (_message.Message,), { + 'DESCRIPTOR' : _SOURCE, + '__module__' : 'feast.core.Source_pb2' + # @@protoc_insertion_point(class_scope:feast.core.Source) + }) +_sym_db.RegisterMessage(Source) + +KafkaSourceConfig = _reflection.GeneratedProtocolMessageType('KafkaSourceConfig', (_message.Message,), { + 'DESCRIPTOR' : _KAFKASOURCECONFIG, + '__module__' : 'feast.core.Source_pb2' + # @@protoc_insertion_point(class_scope:feast.core.KafkaSourceConfig) + }) +_sym_db.RegisterMessage(KafkaSourceConfig) + + +DESCRIPTOR._options = None +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/core/Source_pb2.pyi b/sdk/python/feast/core/Source_pb2.pyi new file mode 100644 index 00000000000..0521ac34f80 --- /dev/null +++ b/sdk/python/feast/core/Source_pb2.pyi @@ -0,0 +1,83 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, + EnumDescriptor as google___protobuf___descriptor___EnumDescriptor, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from typing import ( + List as typing___List, + Optional as typing___Optional, + Text as typing___Text, + Tuple as typing___Tuple, + cast as typing___cast, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class SourceType(int): + DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... + @classmethod + def Name(cls, number: int) -> str: ... + @classmethod + def Value(cls, name: str) -> SourceType: ... + @classmethod + def keys(cls) -> typing___List[str]: ... + @classmethod + def values(cls) -> typing___List[SourceType]: ... + @classmethod + def items(cls) -> typing___List[typing___Tuple[str, SourceType]]: ... 
+ INVALID = typing___cast(SourceType, 0) + KAFKA = typing___cast(SourceType, 1) +INVALID = typing___cast(SourceType, 0) +KAFKA = typing___cast(SourceType, 1) + +class Source(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + type = ... # type: SourceType + + @property + def kafka_source_config(self) -> KafkaSourceConfig: ... + + def __init__(self, + *, + type : typing___Optional[SourceType] = None, + kafka_source_config : typing___Optional[KafkaSourceConfig] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Source: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"kafka_source_config",u"source_config"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"kafka_source_config",u"source_config",u"type"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"kafka_source_config",b"kafka_source_config",u"source_config",b"source_config"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"kafka_source_config",b"kafka_source_config",u"source_config",b"source_config",u"type",b"type"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions___Literal[u"source_config",b"source_config"]) -> typing_extensions___Literal["kafka_source_config"]: ... + +class KafkaSourceConfig(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + bootstrap_servers = ... # type: typing___Text + topic = ... # type: typing___Text + + def __init__(self, + *, + bootstrap_servers : typing___Optional[typing___Text] = None, + topic : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> KafkaSourceConfig: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"bootstrap_servers",u"topic"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"bootstrap_servers",b"bootstrap_servers",u"topic",b"topic"]) -> None: ... diff --git a/sdk/python/feast/core/Source_pb2_grpc.py b/sdk/python/feast/core/Source_pb2_grpc.py new file mode 100644 index 00000000000..a89435267cb --- /dev/null +++ b/sdk/python/feast/core/Source_pb2_grpc.py @@ -0,0 +1,3 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + diff --git a/sdk/python/feast/core/Store_pb2.py b/sdk/python/feast/core/Store_pb2.py new file mode 100644 index 00000000000..c7f9e07d871 --- /dev/null +++ b/sdk/python/feast/core/Store_pb2.py @@ -0,0 +1,339 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/core/Store.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='feast/core/Store.proto', + package='feast.core', + syntax='proto3', + serialized_options=_b('\n\nfeast.coreB\nStoreProtoZ/github.com/gojek/feast/sdk/go/protos/feast/core'), + serialized_pb=_b('\n\x16\x66\x65\x61st/core/Store.proto\x12\nfeast.core\"\xb9\x04\n\x05Store\x12\x0c\n\x04name\x18\x01 \x01(\t\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.feast.core.Store.StoreType\x12\x35\n\rsubscriptions\x18\x04 \x03(\x0b\x32\x1e.feast.core.Store.Subscription\x12\x35\n\x0credis_config\x18\x0b \x01(\x0b\x32\x1d.feast.core.Store.RedisConfigH\x00\x12;\n\x0f\x62igquery_config\x18\x0c \x01(\x0b\x32 .feast.core.Store.BigQueryConfigH\x00\x12=\n\x10\x63\x61ssandra_config\x18\r \x01(\x0b\x32!.feast.core.Store.CassandraConfigH\x00\x1a)\n\x0bRedisConfig\x12\x0c\n\x04host\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05\x1a\x38\n\x0e\x42igQueryConfig\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x12\n\ndataset_id\x18\x02 \x01(\t\x1a-\n\x0f\x43\x61ssandraConfig\x12\x0c\n\x04host\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05\x1a-\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\"@\n\tStoreType\x12\x0b\n\x07INVALID\x10\x00\x12\t\n\x05REDIS\x10\x01\x12\x0c\n\x08\x42IGQUERY\x10\x02\x12\r\n\tCASSANDRA\x10\x03\x42\x08\n\x06\x63onfigBI\n\nfeast.coreB\nStoreProtoZ/github.com/gojek/feast/sdk/go/protos/feast/coreb\x06proto3') +) + + + +_STORE_STORETYPE = _descriptor.EnumDescriptor( + name='StoreType', + full_name='feast.core.Store.StoreType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='INVALID', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REDIS', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BIGQUERY', index=2, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CASSANDRA', index=3, number=3, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=534, + serialized_end=598, +) +_sym_db.RegisterEnumDescriptor(_STORE_STORETYPE) + + +_STORE_REDISCONFIG = _descriptor.Descriptor( + name='RedisConfig', + full_name='feast.core.Store.RedisConfig', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='host', full_name='feast.core.Store.RedisConfig.host', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='port', full_name='feast.core.Store.RedisConfig.port', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + 
], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=339, + serialized_end=380, +) + +_STORE_BIGQUERYCONFIG = _descriptor.Descriptor( + name='BigQueryConfig', + full_name='feast.core.Store.BigQueryConfig', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_id', full_name='feast.core.Store.BigQueryConfig.project_id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='dataset_id', full_name='feast.core.Store.BigQueryConfig.dataset_id', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=382, + serialized_end=438, +) + +_STORE_CASSANDRACONFIG = _descriptor.Descriptor( + name='CassandraConfig', + full_name='feast.core.Store.CassandraConfig', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='host', full_name='feast.core.Store.CassandraConfig.host', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='port', full_name='feast.core.Store.CassandraConfig.port', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=440, + serialized_end=485, +) + +_STORE_SUBSCRIPTION = _descriptor.Descriptor( + name='Subscription', + full_name='feast.core.Store.Subscription', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='feast.core.Store.Subscription.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='version', full_name='feast.core.Store.Subscription.version', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=487, + 
serialized_end=532, +) + +_STORE = _descriptor.Descriptor( + name='Store', + full_name='feast.core.Store', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='feast.core.Store.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='type', full_name='feast.core.Store.type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='subscriptions', full_name='feast.core.Store.subscriptions', index=2, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='redis_config', full_name='feast.core.Store.redis_config', index=3, + number=11, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bigquery_config', full_name='feast.core.Store.bigquery_config', index=4, + number=12, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='cassandra_config', full_name='feast.core.Store.cassandra_config', index=5, + number=13, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_STORE_REDISCONFIG, _STORE_BIGQUERYCONFIG, _STORE_CASSANDRACONFIG, _STORE_SUBSCRIPTION, ], + enum_types=[ + _STORE_STORETYPE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='config', full_name='feast.core.Store.config', + index=0, containing_type=None, fields=[]), + ], + serialized_start=39, + serialized_end=608, +) + +_STORE_REDISCONFIG.containing_type = _STORE +_STORE_BIGQUERYCONFIG.containing_type = _STORE +_STORE_CASSANDRACONFIG.containing_type = _STORE +_STORE_SUBSCRIPTION.containing_type = _STORE +_STORE.fields_by_name['type'].enum_type = _STORE_STORETYPE +_STORE.fields_by_name['subscriptions'].message_type = _STORE_SUBSCRIPTION +_STORE.fields_by_name['redis_config'].message_type = _STORE_REDISCONFIG +_STORE.fields_by_name['bigquery_config'].message_type = _STORE_BIGQUERYCONFIG +_STORE.fields_by_name['cassandra_config'].message_type = _STORE_CASSANDRACONFIG +_STORE_STORETYPE.containing_type = _STORE +_STORE.oneofs_by_name['config'].fields.append( + _STORE.fields_by_name['redis_config']) +_STORE.fields_by_name['redis_config'].containing_oneof = _STORE.oneofs_by_name['config'] +_STORE.oneofs_by_name['config'].fields.append( + 
_STORE.fields_by_name['bigquery_config']) +_STORE.fields_by_name['bigquery_config'].containing_oneof = _STORE.oneofs_by_name['config'] +_STORE.oneofs_by_name['config'].fields.append( + _STORE.fields_by_name['cassandra_config']) +_STORE.fields_by_name['cassandra_config'].containing_oneof = _STORE.oneofs_by_name['config'] +DESCRIPTOR.message_types_by_name['Store'] = _STORE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Store = _reflection.GeneratedProtocolMessageType('Store', (_message.Message,), { + + 'RedisConfig' : _reflection.GeneratedProtocolMessageType('RedisConfig', (_message.Message,), { + 'DESCRIPTOR' : _STORE_REDISCONFIG, + '__module__' : 'feast.core.Store_pb2' + # @@protoc_insertion_point(class_scope:feast.core.Store.RedisConfig) + }) + , + + 'BigQueryConfig' : _reflection.GeneratedProtocolMessageType('BigQueryConfig', (_message.Message,), { + 'DESCRIPTOR' : _STORE_BIGQUERYCONFIG, + '__module__' : 'feast.core.Store_pb2' + # @@protoc_insertion_point(class_scope:feast.core.Store.BigQueryConfig) + }) + , + + 'CassandraConfig' : _reflection.GeneratedProtocolMessageType('CassandraConfig', (_message.Message,), { + 'DESCRIPTOR' : _STORE_CASSANDRACONFIG, + '__module__' : 'feast.core.Store_pb2' + # @@protoc_insertion_point(class_scope:feast.core.Store.CassandraConfig) + }) + , + + 'Subscription' : _reflection.GeneratedProtocolMessageType('Subscription', (_message.Message,), { + 'DESCRIPTOR' : _STORE_SUBSCRIPTION, + '__module__' : 'feast.core.Store_pb2' + # @@protoc_insertion_point(class_scope:feast.core.Store.Subscription) + }) + , + 'DESCRIPTOR' : _STORE, + '__module__' : 'feast.core.Store_pb2' + # @@protoc_insertion_point(class_scope:feast.core.Store) + }) +_sym_db.RegisterMessage(Store) +_sym_db.RegisterMessage(Store.RedisConfig) +_sym_db.RegisterMessage(Store.BigQueryConfig) +_sym_db.RegisterMessage(Store.CassandraConfig) +_sym_db.RegisterMessage(Store.Subscription) + + +DESCRIPTOR._options = None +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/core/Store_pb2.pyi b/sdk/python/feast/core/Store_pb2.pyi new file mode 100644 index 00000000000..726a9d5443e --- /dev/null +++ b/sdk/python/feast/core/Store_pb2.pyi @@ -0,0 +1,163 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, + EnumDescriptor as google___protobuf___descriptor___EnumDescriptor, +) + +from google.protobuf.internal.containers import ( + RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from typing import ( + Iterable as typing___Iterable, + List as typing___List, + Optional as typing___Optional, + Text as typing___Text, + Tuple as typing___Tuple, + cast as typing___cast, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class Store(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + class StoreType(int): + DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... + @classmethod + def Name(cls, number: int) -> str: ... + @classmethod + def Value(cls, name: str) -> Store.StoreType: ... + @classmethod + def keys(cls) -> typing___List[str]: ... + @classmethod + def values(cls) -> typing___List[Store.StoreType]: ... + @classmethod + def items(cls) -> typing___List[typing___Tuple[str, Store.StoreType]]: ... 
+ INVALID = typing___cast(Store.StoreType, 0) + REDIS = typing___cast(Store.StoreType, 1) + BIGQUERY = typing___cast(Store.StoreType, 2) + CASSANDRA = typing___cast(Store.StoreType, 3) + INVALID = typing___cast(Store.StoreType, 0) + REDIS = typing___cast(Store.StoreType, 1) + BIGQUERY = typing___cast(Store.StoreType, 2) + CASSANDRA = typing___cast(Store.StoreType, 3) + + class RedisConfig(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + host = ... # type: typing___Text + port = ... # type: int + + def __init__(self, + *, + host : typing___Optional[typing___Text] = None, + port : typing___Optional[int] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Store.RedisConfig: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"host",u"port"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"host",b"host",u"port",b"port"]) -> None: ... + + class BigQueryConfig(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + project_id = ... # type: typing___Text + dataset_id = ... # type: typing___Text + + def __init__(self, + *, + project_id : typing___Optional[typing___Text] = None, + dataset_id : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Store.BigQueryConfig: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"dataset_id",u"project_id"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"dataset_id",b"dataset_id",u"project_id",b"project_id"]) -> None: ... + + class CassandraConfig(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + host = ... # type: typing___Text + port = ... # type: int + + def __init__(self, + *, + host : typing___Optional[typing___Text] = None, + port : typing___Optional[int] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Store.CassandraConfig: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"host",u"port"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"host",b"host",u"port",b"port"]) -> None: ... + + class Subscription(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + name = ... # type: typing___Text + version = ... # type: typing___Text + + def __init__(self, + *, + name : typing___Optional[typing___Text] = None, + version : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Store.Subscription: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
+ if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"name",u"version"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name",u"version",b"version"]) -> None: ... + + name = ... # type: typing___Text + type = ... # type: Store.StoreType + + @property + def subscriptions(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[Store.Subscription]: ... + + @property + def redis_config(self) -> Store.RedisConfig: ... + + @property + def bigquery_config(self) -> Store.BigQueryConfig: ... + + @property + def cassandra_config(self) -> Store.CassandraConfig: ... + + def __init__(self, + *, + name : typing___Optional[typing___Text] = None, + type : typing___Optional[Store.StoreType] = None, + subscriptions : typing___Optional[typing___Iterable[Store.Subscription]] = None, + redis_config : typing___Optional[Store.RedisConfig] = None, + bigquery_config : typing___Optional[Store.BigQueryConfig] = None, + cassandra_config : typing___Optional[Store.CassandraConfig] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Store: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"bigquery_config",u"cassandra_config",u"config",u"redis_config"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"bigquery_config",u"cassandra_config",u"config",u"name",u"redis_config",u"subscriptions",u"type"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"bigquery_config",b"bigquery_config",u"cassandra_config",b"cassandra_config",u"config",b"config",u"redis_config",b"redis_config"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"bigquery_config",b"bigquery_config",u"cassandra_config",b"cassandra_config",u"config",b"config",u"name",b"name",u"redis_config",b"redis_config",u"subscriptions",b"subscriptions",u"type",b"type"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions___Literal[u"config",b"config"]) -> typing_extensions___Literal["redis_config","bigquery_config","cassandra_config"]: ... diff --git a/sdk/python/feast/core/Store_pb2_grpc.py b/sdk/python/feast/core/Store_pb2_grpc.py new file mode 100644 index 00000000000..a89435267cb --- /dev/null +++ b/sdk/python/feast/core/Store_pb2_grpc.py @@ -0,0 +1,3 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + diff --git a/sdk/python/feast/core/UIService_pb2.py b/sdk/python/feast/core/UIService_pb2.py deleted file mode 100644 index d4d477ac850..00000000000 --- a/sdk/python/feast/core/UIService_pb2.py +++ /dev/null @@ -1,869 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: feast/core/UIService.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from feast.specs import EntitySpec_pb2 as feast_dot_specs_dot_EntitySpec__pb2 -from feast.specs import FeatureSpec_pb2 as feast_dot_specs_dot_FeatureSpec__pb2 -from feast.specs import FeatureGroupSpec_pb2 as feast_dot_specs_dot_FeatureGroupSpec__pb2 -from feast.specs import StorageSpec_pb2 as feast_dot_specs_dot_StorageSpec__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='feast/core/UIService.proto', - package='feast.core', - syntax='proto3', - serialized_options=_b('\n\nfeast.coreB\016UIServiceProtoZ5github.com/gojek/feast/protos/generated/go/feast/core'), - serialized_pb=_b('\n\x1a\x66\x65\x61st/core/UIService.proto\x12\nfeast.core\x1a\x1c\x66\x65\x61st/specs/EntitySpec.proto\x1a\x1d\x66\x65\x61st/specs/FeatureSpec.proto\x1a\"feast/specs/FeatureGroupSpec.proto\x1a\x1d\x66\x65\x61st/specs/StorageSpec.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x90\x0b\n\x0eUIServiceTypes\x1at\n\x0c\x45ntityDetail\x12%\n\x04spec\x18\x01 \x01(\x0b\x32\x17.feast.specs.EntitySpec\x12\x0c\n\x04jobs\x18\x02 \x03(\t\x12/\n\x0blastUpdated\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x1e\n\x10GetEntityRequest\x12\n\n\x02id\x18\x01 \x01(\t\x1aL\n\x11GetEntityResponse\x12\x37\n\x06\x65ntity\x18\x01 \x01(\x0b\x32\'.feast.core.UIServiceTypes.EntityDetail\x1aQ\n\x14ListEntitiesResponse\x12\x39\n\x08\x65ntities\x18\x01 \x03(\x0b\x32\'.feast.core.UIServiceTypes.EntityDetail\x1a\xca\x01\n\rFeatureDetail\x12&\n\x04spec\x18\x01 \x01(\x0b\x32\x18.feast.specs.FeatureSpec\x12\x14\n\x0c\x62igqueryView\x18\x02 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x03 \x01(\x08\x12\x0c\n\x04jobs\x18\x04 \x03(\t\x12/\n\x0blastUpdated\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x07\x63reated\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x1f\n\x11GetFeatureRequest\x12\n\n\x02id\x18\x01 \x01(\t\x1az\n\x12GetFeatureResponse\x12\x39\n\x07\x66\x65\x61ture\x18\x01 \x01(\x0b\x32(.feast.core.UIServiceTypes.FeatureDetail\x12)\n\x07rawSpec\x18\x02 \x01(\x0b\x32\x18.feast.specs.FeatureSpec\x1aR\n\x14ListFeaturesResponse\x12:\n\x08\x66\x65\x61tures\x18\x01 \x03(\x0b\x32(.feast.core.UIServiceTypes.FeatureDetail\x1ar\n\x12\x46\x65\x61tureGroupDetail\x12+\n\x04spec\x18\x01 \x01(\x0b\x32\x1d.feast.specs.FeatureGroupSpec\x12/\n\x0blastUpdated\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a$\n\x16GetFeatureGroupRequest\x12\n\n\x02id\x18\x01 \x01(\t\x1a^\n\x17GetFeatureGroupResponse\x12\x43\n\x0c\x66\x65\x61tureGroup\x18\x01 \x01(\x0b\x32-.feast.core.UIServiceTypes.FeatureGroupDetail\x1a\x61\n\x19ListFeatureGroupsResponse\x12\x44\n\rfeatureGroups\x18\x01 \x03(\x0b\x32-.feast.core.UIServiceTypes.FeatureGroupDetail\x1ah\n\rStorageDetail\x12&\n\x04spec\x18\x01 \x01(\x0b\x32\x18.feast.specs.StorageSpec\x12/\n\x0blastUpdated\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x1f\n\x11GetStorageRequest\x12\n\n\x02id\x18\x01 \x01(\t\x1aO\n\x12GetStorageResponse\x12\x39\n\x07storage\x18\x01 
\x01(\x0b\x32(.feast.core.UIServiceTypes.StorageDetail\x1aP\n\x13ListStorageResponse\x12\x39\n\x07storage\x18\x01 \x03(\x0b\x32(.feast.core.UIServiceTypes.StorageDetail2\xbf\x06\n\tUIService\x12h\n\tGetEntity\x12+.feast.core.UIServiceTypes.GetEntityRequest\x1a,.feast.core.UIServiceTypes.GetEntityResponse\"\x00\x12Y\n\x0cListEntities\x12\x16.google.protobuf.Empty\x1a/.feast.core.UIServiceTypes.ListEntitiesResponse\"\x00\x12k\n\nGetFeature\x12,.feast.core.UIServiceTypes.GetFeatureRequest\x1a-.feast.core.UIServiceTypes.GetFeatureResponse\"\x00\x12Y\n\x0cListFeatures\x12\x16.google.protobuf.Empty\x1a/.feast.core.UIServiceTypes.ListFeaturesResponse\"\x00\x12z\n\x0fGetFeatureGroup\x12\x31.feast.core.UIServiceTypes.GetFeatureGroupRequest\x1a\x32.feast.core.UIServiceTypes.GetFeatureGroupResponse\"\x00\x12\x63\n\x11ListFeatureGroups\x12\x16.google.protobuf.Empty\x1a\x34.feast.core.UIServiceTypes.ListFeatureGroupsResponse\"\x00\x12k\n\nGetStorage\x12,.feast.core.UIServiceTypes.GetStorageRequest\x1a-.feast.core.UIServiceTypes.GetStorageResponse\"\x00\x12W\n\x0bListStorage\x12\x16.google.protobuf.Empty\x1a..feast.core.UIServiceTypes.ListStorageResponse\"\x00\x42S\n\nfeast.coreB\x0eUIServiceProtoZ5github.com/gojek/feast/protos/generated/go/feast/coreb\x06proto3') - , - dependencies=[feast_dot_specs_dot_EntitySpec__pb2.DESCRIPTOR,feast_dot_specs_dot_FeatureSpec__pb2.DESCRIPTOR,feast_dot_specs_dot_FeatureGroupSpec__pb2.DESCRIPTOR,feast_dot_specs_dot_StorageSpec__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) - - - - -_UISERVICETYPES_ENTITYDETAIL = _descriptor.Descriptor( - name='EntityDetail', - full_name='feast.core.UIServiceTypes.EntityDetail', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='spec', full_name='feast.core.UIServiceTypes.EntityDetail.spec', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='jobs', full_name='feast.core.UIServiceTypes.EntityDetail.jobs', index=1, - number=2, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lastUpdated', full_name='feast.core.UIServiceTypes.EntityDetail.lastUpdated', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=251, - serialized_end=367, -) - -_UISERVICETYPES_GETENTITYREQUEST = _descriptor.Descriptor( - name='GetEntityRequest', - full_name='feast.core.UIServiceTypes.GetEntityRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='id', full_name='feast.core.UIServiceTypes.GetEntityRequest.id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, 
containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=369, - serialized_end=399, -) - -_UISERVICETYPES_GETENTITYRESPONSE = _descriptor.Descriptor( - name='GetEntityResponse', - full_name='feast.core.UIServiceTypes.GetEntityResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='entity', full_name='feast.core.UIServiceTypes.GetEntityResponse.entity', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=401, - serialized_end=477, -) - -_UISERVICETYPES_LISTENTITIESRESPONSE = _descriptor.Descriptor( - name='ListEntitiesResponse', - full_name='feast.core.UIServiceTypes.ListEntitiesResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='entities', full_name='feast.core.UIServiceTypes.ListEntitiesResponse.entities', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=479, - serialized_end=560, -) - -_UISERVICETYPES_FEATUREDETAIL = _descriptor.Descriptor( - name='FeatureDetail', - full_name='feast.core.UIServiceTypes.FeatureDetail', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='spec', full_name='feast.core.UIServiceTypes.FeatureDetail.spec', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='bigqueryView', full_name='feast.core.UIServiceTypes.FeatureDetail.bigqueryView', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='enabled', full_name='feast.core.UIServiceTypes.FeatureDetail.enabled', index=2, - number=3, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='jobs', full_name='feast.core.UIServiceTypes.FeatureDetail.jobs', index=3, - number=4, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - 
serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lastUpdated', full_name='feast.core.UIServiceTypes.FeatureDetail.lastUpdated', index=4, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='created', full_name='feast.core.UIServiceTypes.FeatureDetail.created', index=5, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=563, - serialized_end=765, -) - -_UISERVICETYPES_GETFEATUREREQUEST = _descriptor.Descriptor( - name='GetFeatureRequest', - full_name='feast.core.UIServiceTypes.GetFeatureRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='id', full_name='feast.core.UIServiceTypes.GetFeatureRequest.id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=767, - serialized_end=798, -) - -_UISERVICETYPES_GETFEATURERESPONSE = _descriptor.Descriptor( - name='GetFeatureResponse', - full_name='feast.core.UIServiceTypes.GetFeatureResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='feature', full_name='feast.core.UIServiceTypes.GetFeatureResponse.feature', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='rawSpec', full_name='feast.core.UIServiceTypes.GetFeatureResponse.rawSpec', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=800, - serialized_end=922, -) - -_UISERVICETYPES_LISTFEATURESRESPONSE = _descriptor.Descriptor( - name='ListFeaturesResponse', - full_name='feast.core.UIServiceTypes.ListFeaturesResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='features', full_name='feast.core.UIServiceTypes.ListFeaturesResponse.features', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, 
file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=924, - serialized_end=1006, -) - -_UISERVICETYPES_FEATUREGROUPDETAIL = _descriptor.Descriptor( - name='FeatureGroupDetail', - full_name='feast.core.UIServiceTypes.FeatureGroupDetail', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='spec', full_name='feast.core.UIServiceTypes.FeatureGroupDetail.spec', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lastUpdated', full_name='feast.core.UIServiceTypes.FeatureGroupDetail.lastUpdated', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1008, - serialized_end=1122, -) - -_UISERVICETYPES_GETFEATUREGROUPREQUEST = _descriptor.Descriptor( - name='GetFeatureGroupRequest', - full_name='feast.core.UIServiceTypes.GetFeatureGroupRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='id', full_name='feast.core.UIServiceTypes.GetFeatureGroupRequest.id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1124, - serialized_end=1160, -) - -_UISERVICETYPES_GETFEATUREGROUPRESPONSE = _descriptor.Descriptor( - name='GetFeatureGroupResponse', - full_name='feast.core.UIServiceTypes.GetFeatureGroupResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='featureGroup', full_name='feast.core.UIServiceTypes.GetFeatureGroupResponse.featureGroup', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1162, - serialized_end=1256, -) - -_UISERVICETYPES_LISTFEATUREGROUPSRESPONSE = _descriptor.Descriptor( - name='ListFeatureGroupsResponse', - full_name='feast.core.UIServiceTypes.ListFeatureGroupsResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='featureGroups', full_name='feast.core.UIServiceTypes.ListFeatureGroupsResponse.featureGroups', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, 
default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1258, - serialized_end=1355, -) - -_UISERVICETYPES_STORAGEDETAIL = _descriptor.Descriptor( - name='StorageDetail', - full_name='feast.core.UIServiceTypes.StorageDetail', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='spec', full_name='feast.core.UIServiceTypes.StorageDetail.spec', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lastUpdated', full_name='feast.core.UIServiceTypes.StorageDetail.lastUpdated', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1357, - serialized_end=1461, -) - -_UISERVICETYPES_GETSTORAGEREQUEST = _descriptor.Descriptor( - name='GetStorageRequest', - full_name='feast.core.UIServiceTypes.GetStorageRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='id', full_name='feast.core.UIServiceTypes.GetStorageRequest.id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1463, - serialized_end=1494, -) - -_UISERVICETYPES_GETSTORAGERESPONSE = _descriptor.Descriptor( - name='GetStorageResponse', - full_name='feast.core.UIServiceTypes.GetStorageResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='storage', full_name='feast.core.UIServiceTypes.GetStorageResponse.storage', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1496, - serialized_end=1575, -) - -_UISERVICETYPES_LISTSTORAGERESPONSE = _descriptor.Descriptor( - name='ListStorageResponse', - full_name='feast.core.UIServiceTypes.ListStorageResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='storage', full_name='feast.core.UIServiceTypes.ListStorageResponse.storage', index=0, - number=1, type=11, cpp_type=10, label=3, - 
has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1577, - serialized_end=1657, -) - -_UISERVICETYPES = _descriptor.Descriptor( - name='UIServiceTypes', - full_name='feast.core.UIServiceTypes', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - ], - extensions=[ - ], - nested_types=[_UISERVICETYPES_ENTITYDETAIL, _UISERVICETYPES_GETENTITYREQUEST, _UISERVICETYPES_GETENTITYRESPONSE, _UISERVICETYPES_LISTENTITIESRESPONSE, _UISERVICETYPES_FEATUREDETAIL, _UISERVICETYPES_GETFEATUREREQUEST, _UISERVICETYPES_GETFEATURERESPONSE, _UISERVICETYPES_LISTFEATURESRESPONSE, _UISERVICETYPES_FEATUREGROUPDETAIL, _UISERVICETYPES_GETFEATUREGROUPREQUEST, _UISERVICETYPES_GETFEATUREGROUPRESPONSE, _UISERVICETYPES_LISTFEATUREGROUPSRESPONSE, _UISERVICETYPES_STORAGEDETAIL, _UISERVICETYPES_GETSTORAGEREQUEST, _UISERVICETYPES_GETSTORAGERESPONSE, _UISERVICETYPES_LISTSTORAGERESPONSE, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=233, - serialized_end=1657, -) - -_UISERVICETYPES_ENTITYDETAIL.fields_by_name['spec'].message_type = feast_dot_specs_dot_EntitySpec__pb2._ENTITYSPEC -_UISERVICETYPES_ENTITYDETAIL.fields_by_name['lastUpdated'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_UISERVICETYPES_ENTITYDETAIL.containing_type = _UISERVICETYPES -_UISERVICETYPES_GETENTITYREQUEST.containing_type = _UISERVICETYPES -_UISERVICETYPES_GETENTITYRESPONSE.fields_by_name['entity'].message_type = _UISERVICETYPES_ENTITYDETAIL -_UISERVICETYPES_GETENTITYRESPONSE.containing_type = _UISERVICETYPES -_UISERVICETYPES_LISTENTITIESRESPONSE.fields_by_name['entities'].message_type = _UISERVICETYPES_ENTITYDETAIL -_UISERVICETYPES_LISTENTITIESRESPONSE.containing_type = _UISERVICETYPES -_UISERVICETYPES_FEATUREDETAIL.fields_by_name['spec'].message_type = feast_dot_specs_dot_FeatureSpec__pb2._FEATURESPEC -_UISERVICETYPES_FEATUREDETAIL.fields_by_name['lastUpdated'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_UISERVICETYPES_FEATUREDETAIL.fields_by_name['created'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_UISERVICETYPES_FEATUREDETAIL.containing_type = _UISERVICETYPES -_UISERVICETYPES_GETFEATUREREQUEST.containing_type = _UISERVICETYPES -_UISERVICETYPES_GETFEATURERESPONSE.fields_by_name['feature'].message_type = _UISERVICETYPES_FEATUREDETAIL -_UISERVICETYPES_GETFEATURERESPONSE.fields_by_name['rawSpec'].message_type = feast_dot_specs_dot_FeatureSpec__pb2._FEATURESPEC -_UISERVICETYPES_GETFEATURERESPONSE.containing_type = _UISERVICETYPES -_UISERVICETYPES_LISTFEATURESRESPONSE.fields_by_name['features'].message_type = _UISERVICETYPES_FEATUREDETAIL -_UISERVICETYPES_LISTFEATURESRESPONSE.containing_type = _UISERVICETYPES -_UISERVICETYPES_FEATUREGROUPDETAIL.fields_by_name['spec'].message_type = feast_dot_specs_dot_FeatureGroupSpec__pb2._FEATUREGROUPSPEC -_UISERVICETYPES_FEATUREGROUPDETAIL.fields_by_name['lastUpdated'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_UISERVICETYPES_FEATUREGROUPDETAIL.containing_type = _UISERVICETYPES -_UISERVICETYPES_GETFEATUREGROUPREQUEST.containing_type = _UISERVICETYPES 
-_UISERVICETYPES_GETFEATUREGROUPRESPONSE.fields_by_name['featureGroup'].message_type = _UISERVICETYPES_FEATUREGROUPDETAIL -_UISERVICETYPES_GETFEATUREGROUPRESPONSE.containing_type = _UISERVICETYPES -_UISERVICETYPES_LISTFEATUREGROUPSRESPONSE.fields_by_name['featureGroups'].message_type = _UISERVICETYPES_FEATUREGROUPDETAIL -_UISERVICETYPES_LISTFEATUREGROUPSRESPONSE.containing_type = _UISERVICETYPES -_UISERVICETYPES_STORAGEDETAIL.fields_by_name['spec'].message_type = feast_dot_specs_dot_StorageSpec__pb2._STORAGESPEC -_UISERVICETYPES_STORAGEDETAIL.fields_by_name['lastUpdated'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_UISERVICETYPES_STORAGEDETAIL.containing_type = _UISERVICETYPES -_UISERVICETYPES_GETSTORAGEREQUEST.containing_type = _UISERVICETYPES -_UISERVICETYPES_GETSTORAGERESPONSE.fields_by_name['storage'].message_type = _UISERVICETYPES_STORAGEDETAIL -_UISERVICETYPES_GETSTORAGERESPONSE.containing_type = _UISERVICETYPES -_UISERVICETYPES_LISTSTORAGERESPONSE.fields_by_name['storage'].message_type = _UISERVICETYPES_STORAGEDETAIL -_UISERVICETYPES_LISTSTORAGERESPONSE.containing_type = _UISERVICETYPES -DESCRIPTOR.message_types_by_name['UIServiceTypes'] = _UISERVICETYPES -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -UIServiceTypes = _reflection.GeneratedProtocolMessageType('UIServiceTypes', (_message.Message,), dict( - - EntityDetail = _reflection.GeneratedProtocolMessageType('EntityDetail', (_message.Message,), dict( - DESCRIPTOR = _UISERVICETYPES_ENTITYDETAIL, - __module__ = 'feast.core.UIService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.UIServiceTypes.EntityDetail) - )) - , - - GetEntityRequest = _reflection.GeneratedProtocolMessageType('GetEntityRequest', (_message.Message,), dict( - DESCRIPTOR = _UISERVICETYPES_GETENTITYREQUEST, - __module__ = 'feast.core.UIService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.UIServiceTypes.GetEntityRequest) - )) - , - - GetEntityResponse = _reflection.GeneratedProtocolMessageType('GetEntityResponse', (_message.Message,), dict( - DESCRIPTOR = _UISERVICETYPES_GETENTITYRESPONSE, - __module__ = 'feast.core.UIService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.UIServiceTypes.GetEntityResponse) - )) - , - - ListEntitiesResponse = _reflection.GeneratedProtocolMessageType('ListEntitiesResponse', (_message.Message,), dict( - DESCRIPTOR = _UISERVICETYPES_LISTENTITIESRESPONSE, - __module__ = 'feast.core.UIService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.UIServiceTypes.ListEntitiesResponse) - )) - , - - FeatureDetail = _reflection.GeneratedProtocolMessageType('FeatureDetail', (_message.Message,), dict( - DESCRIPTOR = _UISERVICETYPES_FEATUREDETAIL, - __module__ = 'feast.core.UIService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.UIServiceTypes.FeatureDetail) - )) - , - - GetFeatureRequest = _reflection.GeneratedProtocolMessageType('GetFeatureRequest', (_message.Message,), dict( - DESCRIPTOR = _UISERVICETYPES_GETFEATUREREQUEST, - __module__ = 'feast.core.UIService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.UIServiceTypes.GetFeatureRequest) - )) - , - - GetFeatureResponse = _reflection.GeneratedProtocolMessageType('GetFeatureResponse', (_message.Message,), dict( - DESCRIPTOR = _UISERVICETYPES_GETFEATURERESPONSE, - __module__ = 'feast.core.UIService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.UIServiceTypes.GetFeatureResponse) - )) - , - - ListFeaturesResponse = _reflection.GeneratedProtocolMessageType('ListFeaturesResponse', (_message.Message,), dict( - 
DESCRIPTOR = _UISERVICETYPES_LISTFEATURESRESPONSE, - __module__ = 'feast.core.UIService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.UIServiceTypes.ListFeaturesResponse) - )) - , - - FeatureGroupDetail = _reflection.GeneratedProtocolMessageType('FeatureGroupDetail', (_message.Message,), dict( - DESCRIPTOR = _UISERVICETYPES_FEATUREGROUPDETAIL, - __module__ = 'feast.core.UIService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.UIServiceTypes.FeatureGroupDetail) - )) - , - - GetFeatureGroupRequest = _reflection.GeneratedProtocolMessageType('GetFeatureGroupRequest', (_message.Message,), dict( - DESCRIPTOR = _UISERVICETYPES_GETFEATUREGROUPREQUEST, - __module__ = 'feast.core.UIService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.UIServiceTypes.GetFeatureGroupRequest) - )) - , - - GetFeatureGroupResponse = _reflection.GeneratedProtocolMessageType('GetFeatureGroupResponse', (_message.Message,), dict( - DESCRIPTOR = _UISERVICETYPES_GETFEATUREGROUPRESPONSE, - __module__ = 'feast.core.UIService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.UIServiceTypes.GetFeatureGroupResponse) - )) - , - - ListFeatureGroupsResponse = _reflection.GeneratedProtocolMessageType('ListFeatureGroupsResponse', (_message.Message,), dict( - DESCRIPTOR = _UISERVICETYPES_LISTFEATUREGROUPSRESPONSE, - __module__ = 'feast.core.UIService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.UIServiceTypes.ListFeatureGroupsResponse) - )) - , - - StorageDetail = _reflection.GeneratedProtocolMessageType('StorageDetail', (_message.Message,), dict( - DESCRIPTOR = _UISERVICETYPES_STORAGEDETAIL, - __module__ = 'feast.core.UIService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.UIServiceTypes.StorageDetail) - )) - , - - GetStorageRequest = _reflection.GeneratedProtocolMessageType('GetStorageRequest', (_message.Message,), dict( - DESCRIPTOR = _UISERVICETYPES_GETSTORAGEREQUEST, - __module__ = 'feast.core.UIService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.UIServiceTypes.GetStorageRequest) - )) - , - - GetStorageResponse = _reflection.GeneratedProtocolMessageType('GetStorageResponse', (_message.Message,), dict( - DESCRIPTOR = _UISERVICETYPES_GETSTORAGERESPONSE, - __module__ = 'feast.core.UIService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.UIServiceTypes.GetStorageResponse) - )) - , - - ListStorageResponse = _reflection.GeneratedProtocolMessageType('ListStorageResponse', (_message.Message,), dict( - DESCRIPTOR = _UISERVICETYPES_LISTSTORAGERESPONSE, - __module__ = 'feast.core.UIService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.UIServiceTypes.ListStorageResponse) - )) - , - DESCRIPTOR = _UISERVICETYPES, - __module__ = 'feast.core.UIService_pb2' - # @@protoc_insertion_point(class_scope:feast.core.UIServiceTypes) - )) -_sym_db.RegisterMessage(UIServiceTypes) -_sym_db.RegisterMessage(UIServiceTypes.EntityDetail) -_sym_db.RegisterMessage(UIServiceTypes.GetEntityRequest) -_sym_db.RegisterMessage(UIServiceTypes.GetEntityResponse) -_sym_db.RegisterMessage(UIServiceTypes.ListEntitiesResponse) -_sym_db.RegisterMessage(UIServiceTypes.FeatureDetail) -_sym_db.RegisterMessage(UIServiceTypes.GetFeatureRequest) -_sym_db.RegisterMessage(UIServiceTypes.GetFeatureResponse) -_sym_db.RegisterMessage(UIServiceTypes.ListFeaturesResponse) -_sym_db.RegisterMessage(UIServiceTypes.FeatureGroupDetail) -_sym_db.RegisterMessage(UIServiceTypes.GetFeatureGroupRequest) -_sym_db.RegisterMessage(UIServiceTypes.GetFeatureGroupResponse) 
-_sym_db.RegisterMessage(UIServiceTypes.ListFeatureGroupsResponse) -_sym_db.RegisterMessage(UIServiceTypes.StorageDetail) -_sym_db.RegisterMessage(UIServiceTypes.GetStorageRequest) -_sym_db.RegisterMessage(UIServiceTypes.GetStorageResponse) -_sym_db.RegisterMessage(UIServiceTypes.ListStorageResponse) - - -DESCRIPTOR._options = None - -_UISERVICE = _descriptor.ServiceDescriptor( - name='UIService', - full_name='feast.core.UIService', - file=DESCRIPTOR, - index=0, - serialized_options=None, - serialized_start=1660, - serialized_end=2491, - methods=[ - _descriptor.MethodDescriptor( - name='GetEntity', - full_name='feast.core.UIService.GetEntity', - index=0, - containing_service=None, - input_type=_UISERVICETYPES_GETENTITYREQUEST, - output_type=_UISERVICETYPES_GETENTITYRESPONSE, - serialized_options=None, - ), - _descriptor.MethodDescriptor( - name='ListEntities', - full_name='feast.core.UIService.ListEntities', - index=1, - containing_service=None, - input_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - output_type=_UISERVICETYPES_LISTENTITIESRESPONSE, - serialized_options=None, - ), - _descriptor.MethodDescriptor( - name='GetFeature', - full_name='feast.core.UIService.GetFeature', - index=2, - containing_service=None, - input_type=_UISERVICETYPES_GETFEATUREREQUEST, - output_type=_UISERVICETYPES_GETFEATURERESPONSE, - serialized_options=None, - ), - _descriptor.MethodDescriptor( - name='ListFeatures', - full_name='feast.core.UIService.ListFeatures', - index=3, - containing_service=None, - input_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - output_type=_UISERVICETYPES_LISTFEATURESRESPONSE, - serialized_options=None, - ), - _descriptor.MethodDescriptor( - name='GetFeatureGroup', - full_name='feast.core.UIService.GetFeatureGroup', - index=4, - containing_service=None, - input_type=_UISERVICETYPES_GETFEATUREGROUPREQUEST, - output_type=_UISERVICETYPES_GETFEATUREGROUPRESPONSE, - serialized_options=None, - ), - _descriptor.MethodDescriptor( - name='ListFeatureGroups', - full_name='feast.core.UIService.ListFeatureGroups', - index=5, - containing_service=None, - input_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - output_type=_UISERVICETYPES_LISTFEATUREGROUPSRESPONSE, - serialized_options=None, - ), - _descriptor.MethodDescriptor( - name='GetStorage', - full_name='feast.core.UIService.GetStorage', - index=6, - containing_service=None, - input_type=_UISERVICETYPES_GETSTORAGEREQUEST, - output_type=_UISERVICETYPES_GETSTORAGERESPONSE, - serialized_options=None, - ), - _descriptor.MethodDescriptor( - name='ListStorage', - full_name='feast.core.UIService.ListStorage', - index=7, - containing_service=None, - input_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - output_type=_UISERVICETYPES_LISTSTORAGERESPONSE, - serialized_options=None, - ), -]) -_sym_db.RegisterServiceDescriptor(_UISERVICE) - -DESCRIPTOR.services_by_name['UIService'] = _UISERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/core/UIService_pb2_grpc.py b/sdk/python/feast/core/UIService_pb2_grpc.py deleted file mode 100644 index ba86fc468ae..00000000000 --- a/sdk/python/feast/core/UIService_pb2_grpc.py +++ /dev/null @@ -1,180 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-import grpc - -from feast.core import UIService_pb2 as feast_dot_core_dot_UIService__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class UIServiceStub(object): - # missing associated documentation comment in .proto file - pass - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.GetEntity = channel.unary_unary( - '/feast.core.UIService/GetEntity', - request_serializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.GetEntityRequest.SerializeToString, - response_deserializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.GetEntityResponse.FromString, - ) - self.ListEntities = channel.unary_unary( - '/feast.core.UIService/ListEntities', - request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - response_deserializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.ListEntitiesResponse.FromString, - ) - self.GetFeature = channel.unary_unary( - '/feast.core.UIService/GetFeature', - request_serializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.GetFeatureRequest.SerializeToString, - response_deserializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.GetFeatureResponse.FromString, - ) - self.ListFeatures = channel.unary_unary( - '/feast.core.UIService/ListFeatures', - request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - response_deserializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.ListFeaturesResponse.FromString, - ) - self.GetFeatureGroup = channel.unary_unary( - '/feast.core.UIService/GetFeatureGroup', - request_serializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.GetFeatureGroupRequest.SerializeToString, - response_deserializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.GetFeatureGroupResponse.FromString, - ) - self.ListFeatureGroups = channel.unary_unary( - '/feast.core.UIService/ListFeatureGroups', - request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - response_deserializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.ListFeatureGroupsResponse.FromString, - ) - self.GetStorage = channel.unary_unary( - '/feast.core.UIService/GetStorage', - request_serializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.GetStorageRequest.SerializeToString, - response_deserializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.GetStorageResponse.FromString, - ) - self.ListStorage = channel.unary_unary( - '/feast.core.UIService/ListStorage', - request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - response_deserializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.ListStorageResponse.FromString, - ) - - -class UIServiceServicer(object): - # missing associated documentation comment in .proto file - pass - - def GetEntity(self, request, context): - """ - Get entity specified in request. - This process returns a single of entity specs. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def ListEntities(self, request, context): - """ - Get all entities. - This process returns a list of entity specs. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetFeature(self, request, context): - """ - Get feature specified in request. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def ListFeatures(self, request, context): - """ - Get all features. - This process returns a list of feature specs. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetFeatureGroup(self, request, context): - """ - Get feature group specified in request. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def ListFeatureGroups(self, request, context): - """ - Get all feature groups. - This process returns a list of feature group specs. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetStorage(self, request, context): - """ - Get storage spec specified in request. - - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def ListStorage(self, request, context): - """ - Get all storage specs. - This process returns a list of storage specs. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_UIServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - 'GetEntity': grpc.unary_unary_rpc_method_handler( - servicer.GetEntity, - request_deserializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.GetEntityRequest.FromString, - response_serializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.GetEntityResponse.SerializeToString, - ), - 'ListEntities': grpc.unary_unary_rpc_method_handler( - servicer.ListEntities, - request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - response_serializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.ListEntitiesResponse.SerializeToString, - ), - 'GetFeature': grpc.unary_unary_rpc_method_handler( - servicer.GetFeature, - request_deserializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.GetFeatureRequest.FromString, - response_serializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.GetFeatureResponse.SerializeToString, - ), - 'ListFeatures': grpc.unary_unary_rpc_method_handler( - servicer.ListFeatures, - request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - response_serializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.ListFeaturesResponse.SerializeToString, - ), - 'GetFeatureGroup': grpc.unary_unary_rpc_method_handler( - servicer.GetFeatureGroup, - request_deserializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.GetFeatureGroupRequest.FromString, - response_serializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.GetFeatureGroupResponse.SerializeToString, - ), - 'ListFeatureGroups': grpc.unary_unary_rpc_method_handler( - servicer.ListFeatureGroups, - request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - response_serializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.ListFeatureGroupsResponse.SerializeToString, - ), - 'GetStorage': grpc.unary_unary_rpc_method_handler( - servicer.GetStorage, - request_deserializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.GetStorageRequest.FromString, - 
response_serializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.GetStorageResponse.SerializeToString, - ), - 'ListStorage': grpc.unary_unary_rpc_method_handler( - servicer.ListStorage, - request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - response_serializer=feast_dot_core_dot_UIService__pb2.UIServiceTypes.ListStorageResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'feast.core.UIService', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/sdk/python/feast/entity.py b/sdk/python/feast/entity.py new file mode 100644 index 00000000000..a554e03fe38 --- /dev/null +++ b/sdk/python/feast/entity.py @@ -0,0 +1,28 @@ +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from feast.value_type import ValueType +from feast.core.FeatureSet_pb2 import EntitySpec as EntityProto +from feast.types import Value_pb2 as ValueTypeProto +from feast.field import Field + + +class Entity(Field): + def to_proto(self) -> EntityProto: + value_type = ValueTypeProto.ValueType.Enum.Value(self.dtype.name) + return EntityProto(name=self.name, value_type=value_type) + + @classmethod + def from_proto(cls, entity_proto: EntityProto): + return cls(name=entity_proto.name, dtype=ValueType(entity_proto.value_type)) diff --git a/sdk/python/feast/sdk/env.py b/sdk/python/feast/exceptions.py similarity index 76% rename from sdk/python/feast/sdk/env.py rename to sdk/python/feast/exceptions.py index 28c012aba7f..0a413693961 100644 --- a/sdk/python/feast/sdk/env.py +++ b/sdk/python/feast/exceptions.py @@ -1,4 +1,4 @@ -# Copyright 2018 The Feast Authors +# Copyright 2019 The Feast Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,5 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -FEAST_SERVING_URL_ENV_KEY = "FEAST_SERVING_URL" -FEAST_CORE_URL_ENV_KEY = "FEAST_CORE_URL" + +def format_grpc_exception(method: str, code: str, details: str) -> str: + return f'{method} failed with code "{code}"\n{details}' diff --git a/sdk/python/feast/feature.py b/sdk/python/feast/feature.py new file mode 100644 index 00000000000..dea779d70c2 --- /dev/null +++ b/sdk/python/feast/feature.py @@ -0,0 +1,28 @@ +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from feast.value_type import ValueType +from feast.core.FeatureSet_pb2 import FeatureSpec as FeatureProto +from feast.types import Value_pb2 as ValueTypeProto +from feast.field import Field + + +class Feature(Field): + def to_proto(self) -> FeatureProto: + value_type = ValueTypeProto.ValueType.Enum.Value(self.dtype.name) + return FeatureProto(name=self.name, value_type=value_type) + + @classmethod + def from_proto(cls, feature_proto: FeatureProto): + return cls(name=feature_proto.name, dtype=ValueType(feature_proto.value_type)) diff --git a/sdk/python/feast/feature_set.py b/sdk/python/feast/feature_set.py new file mode 100644 index 00000000000..550779070f4 --- /dev/null +++ b/sdk/python/feast/feature_set.py @@ -0,0 +1,420 @@ +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import logging + +import os +import pandas as pd +from math import ceil +from multiprocessing import Process, Queue, cpu_count +from typing import List, Optional +from collections import OrderedDict +from typing import Dict +from feast.source import Source +from feast.type_map import dtype_to_value_type +from pandas.api.types import is_datetime64_ns_dtype +from feast.entity import Entity +from feast.feature import Feature, Field +from feast.core.FeatureSet_pb2 import FeatureSetSpec as FeatureSetSpecProto +from feast.types import FeatureRow_pb2 as FeatureRow +from google.protobuf.timestamp_pb2 import Timestamp +from google.protobuf.duration_pb2 import Duration +from kafka import KafkaProducer +from tqdm import tqdm +from feast.type_map import pandas_dtype_to_feast_value_type +from feast.types import FeatureRow_pb2 as FeatureRowProto, Field_pb2 as FieldProto +from feast.type_map import pd_value_to_proto_value +from google.protobuf.json_format import MessageToJson +import yaml +from google.protobuf import json_format +from feast.source import KafkaSource +from feast.type_map import DATETIME_COLUMN +from feast.loaders import yaml as feast_yaml + + +class FeatureSet: + """ + Represents a collection of features. 
+ """ + + def __init__( + self, + name: str, + features: List[Feature] = None, + entities: List[Entity] = None, + source: Source = None, + max_age: Optional[Duration] = None, + ): + self._name = name + self._fields = OrderedDict() # type: Dict[str, Field] + if features is not None: + self.features = features + if entities is not None: + self.entities = entities + if source is None: + self._source = None + else: + self._source = source + self._max_age = max_age + self._version = None + self._client = None + self._busy_ingesting = False + self._is_dirty = True + + def __eq__(self, other): + if not isinstance(other, FeatureSet): + return NotImplemented + + for key in self.fields.keys(): + if key not in other.fields.keys() or self.fields[key] != other.fields[key]: + return False + + if self.name != other.name or self.max_age != other.max_age: + return False + return True + + def __str__(self): + return str(MessageToJson(self.to_proto())) + + @property + def fields(self) -> Dict[str, Field]: + """ + Returns a dict of fields from this feature set + """ + return self._fields + + @property + def features(self) -> List[Feature]: + """ + Returns a list of features from this feature set + """ + return [field for field in self._fields.values() if isinstance(field, Feature)] + + @features.setter + def features(self, features: List[Feature]): + for feature in features: + if not isinstance(feature, Feature): + raise Exception("object type is not a Feature: " + str(type(feature))) + + for key in list(self._fields.keys()): + if isinstance(self._fields[key], Feature): + del self._fields[key] + + if features is not None: + self._add_fields(features) + + @property + def entities(self) -> List[Entity]: + """ + Returns list of entities from this feature set + """ + return [field for field in self._fields.values() if isinstance(field, Entity)] + + @entities.setter + def entities(self, entities: List[Entity]): + for entity in entities: + if not isinstance(entity, Entity): + raise Exception("object type is not na Entity: " + str(type(entity))) + + for key in list(self._fields.keys()): + if isinstance(self._fields[key], Entity): + del self._fields[key] + + if entities is not None: + self._add_fields(entities) + + @property + def name(self): + return self._name + + @name.setter + def name(self, name): + self._name = name + + @property + def source(self): + return self._source + + @source.setter + def source(self, source: Source): + self._source = source + + @property + def version(self): + return self._version + + @version.setter + def version(self, version): + self._version = version + + @property + def max_age(self): + return self._max_age + + @max_age.setter + def max_age(self, max_age): + self._max_age = max_age + + @property + def is_dirty(self): + return self._is_dirty + + def add(self, resource): + """ + Adds a resource (Feature, Entity) to this Feature Set. 
+ Does not register the updated Feature Set with Feast Core + :param resource: A resource can be either a Feature or an Entity object + :return: + """ + if resource.name in self._fields.keys(): + raise ValueError( + 'could not add field "' + + resource.name + + '" since it already exists in feature set "' + + self._name + + '"' + ) + + if issubclass(type(resource), Field): + return self._set_field(resource) + + raise ValueError("Could not identify the resource being added") + + def _set_field(self, field: Field): + self._fields[field.name] = field + return + + def drop(self, name: str): + """ + Removes a Feature or Entity from a Feature Set + :param name: Name of Feature or Entity to be removed + """ + if name not in self._fields: + raise ValueError("Could not find field " + name + ", no action taken") + if name in self._fields: + del self._fields[name] + return + + def _add_fields(self, fields: List[Field]): + """ + Adds multiple Fields to a Feature Set + :param fields: List of Feature or Entity Objects + """ + for field in fields: + self.add(field) + + def infer_fields_from_df( + self, + df: pd.DataFrame, + entities: Optional[List[Entity]] = None, + features: Optional[List[Feature]] = None, + replace_existing_features: bool = False, + replace_existing_entities: bool = False, + discard_unused_fields: bool = False, + ): + """ + Adds fields (Features or Entities) to a feature set based on the schema + of a dataframe. Only Pandas dataframes are supported. All columns are + detected as features, so setting at least one entity manually is + advised. + + :param df: Pandas dataframe to read schema from + :param entities: List of entities that will be set manually and not + inferred. These will take precedence over any existing entities or + entities found in the dataframe. + :param features: List of features that will be set manually and not + inferred. These will take precedence over any existing features or + features found in the dataframe. + :param discard_unused_fields: Boolean flag. Setting this to True will + discard any existing fields that are not found in the dataset or + provided by the user + :param replace_existing_features: Boolean flag. If true, will replace + existing features in this feature set with features found in dataframe. + If false, will skip conflicting features + :param replace_existing_entities: Boolean flag. If true, will replace + existing entities in this feature set with features found in dataframe.
+ If false, will skip conflicting entities + """ + + if entities is None: + entities = list() + if features is None: + features = list() + + # Validate whether the datetime column exists with the right name + if DATETIME_COLUMN not in df: + raise Exception("No column 'datetime'") + + # Validate the data type for the datetime column + if not is_datetime64_ns_dtype(df.dtypes[DATETIME_COLUMN]): + raise Exception( + "Column 'datetime' does not have the correct type: datetime64[ns]" + ) + + # Create dictionary of fields that will not be inferred (manually set) + provided_fields = OrderedDict() + + for field in entities + features: + if not isinstance(field, Field): + raise Exception(f"Invalid field object type provided {type(field)}") + if field.name not in provided_fields: + provided_fields[field.name] = field + else: + raise Exception(f"Duplicate field name detected {field.name}.") + + new_fields = self._fields.copy() + output_log = "" + + # Add in provided fields + for name, field in provided_fields.items(): + if name in new_fields.keys(): + upsert_message = "updated (replacing an existing field)" + else: + upsert_message = "created" + + output_log += ( + f"{type(field).__name__} {field.name} " + f"({field.dtype}) manually {upsert_message}.\n" + ) + new_fields[name] = field + + # Iterate over all of the columns and create features + for column in df.columns: + column = column.strip() + + # Skip datetime column + if DATETIME_COLUMN in column: + continue + + # Skip user provided fields + if column in provided_fields.keys(): + continue + + # Only overwrite conflicting fields if replacement is allowed + if column in new_fields: + if ( + isinstance(self._fields[column], Feature) + and not replace_existing_features + ): + continue + + if ( + isinstance(self._fields[column], Entity) + and not replace_existing_entities + ): + continue + + # Store this field as a feature + new_fields[column] = Feature( + name=column, dtype=pandas_dtype_to_feast_value_type(df[column].dtype) + ) + output_log += f"{type(new_fields[column]).__name__} {new_fields[column].name} ({new_fields[column].dtype}) added from dataframe.\n" + + # Discard unused fields from feature set + if discard_unused_fields: + keys_to_remove = [] + for key in new_fields.keys(): + if not (key in df.columns or key in provided_fields.keys()): + output_log += f"{type(new_fields[key]).__name__} {new_fields[key].name} ({new_fields[key].dtype}) removed because it is unused.\n" + keys_to_remove.append(key) + for key in keys_to_remove: + del new_fields[key] + + # Update feature set + self._fields = new_fields + print(output_log) + + def _update_from_feature_set(self, feature_set, is_dirty: bool = True): + + self.name = feature_set.name + self.version = feature_set.version + self.source = feature_set.source + self.max_age = feature_set.max_age + self.features = feature_set.features + self.entities = feature_set.entities + + self._is_dirty = is_dirty + + def get_kafka_source_brokers(self) -> str: + if self.source and self.source.source_type == "Kafka": + return self.source.brokers + raise Exception("Source type could not be identified") + + def get_kafka_source_topic(self) -> str: + if self.source and self.source.source_type == "Kafka": + return self.source.topic + raise Exception("Source type could not be identified") + + def is_valid(self): + """ + Validates the state of a feature set locally + :return: (bool, str) True if valid, false if invalid.
Contains a message + string with a reason + """ + if len(self.entities) == 0: + return False, f"No entities found in feature set {self.name}" + return True, "" + + @classmethod + def from_yaml(cls, yml): + return cls.from_dict(feast_yaml.yaml_loader(yml, load_single=True)) + + @classmethod + def from_dict(cls, fs_dict): + if ("kind" not in fs_dict) or (fs_dict["kind"].strip() != "feature_set"): + raise Exception(f"Resource kind is not a feature set {str(fs_dict)}") + feature_set_proto = json_format.ParseDict( + fs_dict, FeatureSetSpecProto(), ignore_unknown_fields=True + ) + return cls.from_proto(feature_set_proto) + + @classmethod + def from_proto(cls, feature_set_proto: FeatureSetSpecProto): + feature_set = cls( + name=feature_set_proto.name, + features=[ + Feature.from_proto(feature) for feature in feature_set_proto.features + ], + entities=[ + Entity.from_proto(entity) for entity in feature_set_proto.entities + ], + max_age=feature_set_proto.max_age, + source=( + None + if feature_set_proto.source.type == 0 + else Source.from_proto(feature_set_proto.source) + ), + ) + feature_set._version = feature_set_proto.version + feature_set._is_dirty = False + return feature_set + + def to_proto(self) -> FeatureSetSpecProto: + return FeatureSetSpecProto( + name=self.name, + version=self.version, + max_age=self.max_age, + source=self.source.to_proto() if self.source is not None else None, + features=[ + field.to_proto() + for field in self._fields.values() + if type(field) == Feature + ], + entities=[ + field.to_proto() + for field in self._fields.values() + if type(field) == Entity + ], + ) diff --git a/sdk/python/feast/field.py b/sdk/python/feast/field.py new file mode 100644 index 00000000000..515adabdf04 --- /dev/null +++ b/sdk/python/feast/field.py @@ -0,0 +1,42 @@ +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
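Returning to feature_set.py above: the schema-inference helper infer_fields_from_df can be exercised against a toy dataframe. A hedged sketch with invented column names, assuming float64 columns are mapped by feast.type_map.pandas_dtype_to_feast_value_type.

```python
# Sketch only; column names and dtypes are assumptions.
import pandas as pd

from feast.entity import Entity
from feast.feature_set import FeatureSet
from feast.value_type import ValueType

df = pd.DataFrame(
    {
        "datetime": pd.to_datetime(["2019-01-01", "2019-01-02"]),
        "customer_id": [1001, 1002],
        "daily_transactions": [5.0, 2.5],
    }
)

fs = FeatureSet(name="customer_transactions")
# Pin customer_id as an entity; every other non-datetime column is inferred
# as a Feature whose dtype is derived from the pandas dtype.
fs.infer_fields_from_df(df, entities=[Entity("customer_id", ValueType.INT64)])
print([field.name for field in fs.features])  # expected: ['daily_transactions']
```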
+ +from feast.value_type import ValueType + + +class Field: + def __init__(self, name: str, dtype: ValueType): + self._name = name + if not isinstance(dtype, ValueType): + raise ValueError("dtype is not a valid ValueType") + self._dtype = dtype + + def __eq__(self, other): + if self.name != other.name or self.dtype != other.dtype: + return False + return True + + @property + def name(self): + return self._name + + @property + def dtype(self) -> ValueType: + return self._dtype + + def to_proto(self): + pass + + def from_proto(self, proto): + pass diff --git a/sdk/python/feast/job.py b/sdk/python/feast/job.py new file mode 100644 index 00000000000..a2187a27059 --- /dev/null +++ b/sdk/python/feast/job.py @@ -0,0 +1,122 @@ +import tempfile +import time +from datetime import datetime, timedelta +from typing import List +from urllib.parse import urlparse + +import fastavro +import pandas as pd +from fastavro import reader as fastavro_reader +from google.cloud import storage + +from feast.serving.ServingService_pb2 import GetJobRequest +from feast.serving.ServingService_pb2 import ( + Job as JobProto, + JOB_STATUS_DONE, + DATA_FORMAT_AVRO, +) +from feast.serving.ServingService_pb2_grpc import ServingServiceStub + +# Maximum no of seconds to wait until the jobs status is DONE in Feast +# Currently set to the maximum query execution time limit in BigQuery +DEFAULT_TIMEOUT_SEC: int = 21600 + +# Maximum no of seconds to wait before reloading the job status in Feast +MAX_WAIT_INTERVAL_SEC: int = 60 + + +class Job: + """ + A class representing a job for feature retrieval in Feast. + """ + + def __init__(self, job_proto: JobProto, serving_stub: ServingServiceStub): + """ + Args: + job_proto: Job proto object (wrapped by this job object) + serving_stub: Stub for Feast serving service + storage_client: Google Cloud Storage client + """ + self.job_proto = job_proto + self.serving_stub = serving_stub + self.storage_client = storage.Client(project=None) + + @property + def id(self): + return self.job_proto.id + + @property + def status(self): + return self.job_proto.status + + def reload(self): + """ + Reload the latest job status + Returns: None + """ + self.job_proto = self.serving_stub.GetJob(GetJobRequest(job=self.job_proto)).job + + def result(self, timeout_sec: int = DEFAULT_TIMEOUT_SEC): + """ + Wait until job is done to get an iterable rows of result. + The row can only represent an Avro row in Feast 0.3. + + Args: + timeout_sec: max no of seconds to wait until job is done. If "timeout_sec" is exceeded, an exception will be raised. + + Returns: Iterable of Avro rows + + """ + max_wait_datetime = datetime.now() + timedelta(seconds=timeout_sec) + wait_duration_sec = 2 + + while self.status != JOB_STATUS_DONE: + if datetime.now() > max_wait_datetime: + raise Exception( + "Timeout exceeded while waiting for result. Please retry this method or use a longer timeout value." + ) + + self.reload() + time.sleep(wait_duration_sec) + # Backoff the wait duration exponentially up till MAX_WAIT_INTERVAL_SEC + wait_duration_sec = min(wait_duration_sec * 2, MAX_WAIT_INTERVAL_SEC) + + if self.job_proto.error: + raise Exception(self.job_proto.error) + + if self.job_proto.data_format != DATA_FORMAT_AVRO: + raise Exception( + "Feast only supports Avro data format for now. Please check " + "your Feast Serving deployment." 
+ ) + + uris = [urlparse(uri) for uri in self.job_proto.file_uris] + for file_uri in uris: + if file_uri.scheme == "gs": + file_obj = tempfile.TemporaryFile() + self.storage_client.download_blob_to_file(file_uri.geturl(), file_obj) + elif file_uri.scheme == "file": + file_obj = open(file_uri.path, "rb") + else: + raise Exception( + f"Could not identify file URI {file_uri}. Only gs:// and file:// supported" + ) + + file_obj.seek(0) + avro_reader = fastavro.reader(file_obj) + + for record in avro_reader: + yield record + + def to_dataframe(self, timeout_sec: int = DEFAULT_TIMEOUT_SEC): + """ + Wait until job is done to get an interable rows of result + Args: + timeout_sec: max no of seconds to wait until job is done. If "timeout_sec" is exceeded, an exception will be raised. + Returns: pandas Dataframe of the feature values + """ + records = [r for r in self.result(timeout_sec=timeout_sec)] + return pd.DataFrame.from_records(records) + + def __iter__(self): + return iter(self.result()) diff --git a/sdk/python/feast/sdk/resources/__init__.py b/sdk/python/feast/loaders/__init__.py similarity index 100% rename from sdk/python/feast/sdk/resources/__init__.py rename to sdk/python/feast/loaders/__init__.py diff --git a/sdk/python/feast/loaders/file.py b/sdk/python/feast/loaders/file.py new file mode 100644 index 00000000000..062aec7f041 --- /dev/null +++ b/sdk/python/feast/loaders/file.py @@ -0,0 +1,72 @@ +import shutil +import tempfile +from typing import Optional +from urllib.parse import urlparse +import uuid +import pandas as pd +from datetime import datetime +from google.cloud import storage +from pandavro import to_avro + + +def export_dataframe_to_staging_location( + df: pd.DataFrame, staging_location_uri: str +) -> str: + """ + Uploads a dataframe to a remote staging location + :param df: Pandas dataframe + :param staging_location_uri: Remote staging location where dataframe should be written + Examples: gs://bucket/path/ + file:///data/subfolder/ + :return: Returns the full path to the file in the remote staging location + """ + # Validate staging location + uri = urlparse(staging_location_uri) + if uri.scheme == "gs": + dir_path, file_name, source_path = export_dataframe_to_local(df) + upload_file_to_gcs( + source_path, uri.hostname, str(uri.path).strip("/") + "/" + file_name + ) + if len(str(dir_path)) < 5: + raise Exception(f"Export location {dir_path} dangerous. Stopping.") + shutil.rmtree(dir_path) + elif uri.scheme == "file": + dir_path, file_name, source_path = export_dataframe_to_local(df, uri.path) + else: + raise Exception( + f"Staging location {staging_location_uri} does not have a valid URI. 
Only gs:// and file:// are supported" + ) + + return staging_location_uri.rstrip("/") + "/" + file_name + + +def export_dataframe_to_local(df: pd.DataFrame, dir_path: Optional[str] = None): + """ + Exports a pandas dataframe to the local filesystem + :param df: Pandas dataframe to save + :param dir_path: (optional) Absolute directory path '/data/project/subfolder/' + :return: + """ + # Create local staging location if not provided + if dir_path is None: + dir_path = tempfile.mkdtemp() + + file_name = f'{datetime.now().strftime("%d-%m-%Y_%I-%M-%S_%p")}_{str(uuid.uuid4())[:8]}.avro' + dest_path = f"{dir_path}/{file_name}" + + # Export dataset to file in local path + to_avro(df=df, file_path_or_buffer=dest_path) + return dir_path, file_name, dest_path + + +def upload_file_to_gcs(local_path: str, bucket: str, remote_path: str): + """ + Upload a file from the local file system to Google Cloud Storage (GCS) + :param local_path: Local filesystem path of file to upload + :param bucket: GCS bucket to upload to + :param remote_path: Path within GCS bucket to upload file to, includes file name + """ + storage_client = storage.Client(project=None) + bucket = storage_client.get_bucket(bucket) + blob = bucket.blob(remote_path) + blob.upload_from_filename(local_path) diff --git a/sdk/python/feast/loaders/ingest.py b/sdk/python/feast/loaders/ingest.py new file mode 100644 index 00000000000..55cc432098e --- /dev/null +++ b/sdk/python/feast/loaders/ingest.py @@ -0,0 +1,162 @@ +import logging +import math +import os +import time +import numpy as np +from itertools import repeat +from multiprocessing import Process, Queue, Pool +import pandas as pd +from kafka import KafkaProducer + +from tqdm import tqdm +from feast.type_map import convert_df_to_feature_rows +from feast.feature_set import FeatureSet + +_logger = logging.getLogger(__name__) + +GRPC_CONNECTION_TIMEOUT_DEFAULT = 3 # type: int +GRPC_CONNECTION_TIMEOUT_APPLY = 300 # type: int +FEAST_SERVING_URL_ENV_KEY = "FEAST_SERVING_URL" # type: str +FEAST_CORE_URL_ENV_KEY = "FEAST_CORE_URL" # type: str +BATCH_FEATURE_REQUEST_WAIT_TIME_SECONDS = 300 +CPU_COUNT = os.cpu_count() # type: int + + +def _kafka_feature_row_chunk_producer( + feature_row_chunk_queue: Queue, chunk_count: int, brokers, topic, progress_bar: tqdm +): + processed_chunks = 0 + rows_processed = 0 + producer = KafkaProducer(bootstrap_servers=brokers) + while processed_chunks < chunk_count: + if feature_row_chunk_queue.empty(): + time.sleep(0.1) + else: + feature_rows = feature_row_chunk_queue.get() + rows_processed += len(feature_rows) + for row in feature_rows: + progress_bar.update() + producer.send(topic, row.SerializeToString()) + + producer.flush() + progress_bar.refresh() + processed_chunks += 1 + + +def _encode_chunk(df: pd.DataFrame, feature_set: FeatureSet): + # Encode dataframe chunk into feature rows chunk + return df.apply(convert_df_to_feature_rows(df, feature_set), axis=1, raw=True) + + +def ingest_kafka( + feature_set: FeatureSet, + dataframe: pd.DataFrame, + max_workers: int, + chunk_size: int = 5000, + disable_progress_bar: bool = False, +): + progress_bar = tqdm( + unit="rows", total=dataframe.shape[0], disable=disable_progress_bar + ) + + # Validate feature set schema + validate_dataframe(dataframe, feature_set) + + # Split dataframe into chunks + num_chunks = max(dataframe.shape[0] / max(chunk_size, 100), 1) + df_chunks = np.array_split(dataframe, num_chunks) + + # Create queue through which encoding and ingestion will coordinate + chunk_queue = Queue() + + # Start 
ingestion process to push feature rows to Kafka + ingestion_process = Process( + target=_kafka_feature_row_chunk_producer, + args=( + chunk_queue, + num_chunks, + feature_set.get_kafka_source_brokers(), + feature_set.get_kafka_source_topic(), + progress_bar, + ), + ) + + try: + # Start ingestion process + ingestion_process.start() + + # Create a pool of workers to convert df chunks into feature row chunks + # and push them into the queue for ingestion to pick up + with Pool(processes=max_workers) as pool: + chunks_done = 0 + while chunks_done < num_chunks: + chunks_to = min(chunks_done + max_workers, len(df_chunks)) + results = pool.starmap_async( + _encode_chunk, + zip(df_chunks[chunks_done:chunks_to], repeat(feature_set)), + ) + for result in results.get(): + chunk_queue.put(result) + chunks_done += max_workers + except Exception as ex: + _logger.error(f"Exception occurred: {ex}") + finally: + ingestion_process.join() + rows_ingested = progress_bar.total + progress_bar.close() + print( + f"\nIngested {rows_ingested} rows into {feature_set.name}:{feature_set.version}" + ) + + +def ingest_file( + client, + file_path: str, + force_update: bool = False, + timeout: int = 5, + max_workers=CPU_COUNT, +): + """ + Load the contents of a file into a Kafka topic. + Files that are currently supported: + * csv + * parquet + :param file_path: Valid string path to the file + :param force_update: Flag to update feature set from dataset and reregister if changed. + :param timeout: Timeout in seconds to wait for completion + :param max_workers: The maximum number of threads that can be used to execute the given calls. + :return: + """ + df = None + filename, file_ext = os.path.splitext(file_path) + if ".parquet" in file_ext: + df = pd.read_parquet(file_path) + elif ".csv" in file_ext: + df = pd.read_csv(file_path, index_col=False) + try: + # Ensure that dataframe is initialised + assert isinstance(df, pd.DataFrame) + except AssertionError: + _logger.error(f"Ingestion of file type {file_ext} is not supported") + raise Exception("File type not supported") + + client.ingest(df, force_update, timeout, max_workers) + + +def validate_dataframe(dataframe: pd.DataFrame, fs: FeatureSet): + if "datetime" not in dataframe.columns: + raise ValueError( + f'Dataframe does not contain entity "datetime" in columns {dataframe.columns}' + ) + + for entity in fs.entities: + if entity.name not in dataframe.columns: + raise ValueError( + f"Dataframe does not contain entity {entity.name} in columns {dataframe.columns}" + ) + + for feature in fs.features: + if feature.name not in dataframe.columns: + raise ValueError( + f"Dataframe does not contain feature {feature.name} in columns {dataframe.columns}" + ) diff --git a/sdk/python/feast/loaders/yaml.py b/sdk/python/feast/loaders/yaml.py new file mode 100644 index 00000000000..27c6d167c12 --- /dev/null +++ b/sdk/python/feast/loaders/yaml.py @@ -0,0 +1,48 @@ +import yaml + + +def yaml_loader(yml, load_single=False): + """ + Loads one or more Feast resources from a YAML path or string. 
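A short sketch of the pre-ingestion schema check that loaders/ingest.py (above) performs via validate_dataframe; the feature set and dataframe below are assumptions, not fixtures from this change.

```python
# Hedged sketch; all names are made up.
import pandas as pd

from feast.entity import Entity
from feast.feature import Feature
from feast.feature_set import FeatureSet
from feast.loaders.ingest import validate_dataframe
from feast.value_type import ValueType

fs = FeatureSet(
    name="customer_transactions",
    entities=[Entity("customer_id", ValueType.INT64)],
    features=[Feature("daily_transactions", ValueType.FLOAT)],
)

df = pd.DataFrame(
    {
        "datetime": pd.to_datetime(["2019-01-01"]),
        "customer_id": [1001],
        "daily_transactions": [5.0],
    }
)

# Raises ValueError if the datetime column, an entity or a feature is missing.
validate_dataframe(df, fs)
```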
Multiple resources + can be divided by three hyphens '---' + :param yml: A path ending in .yaml or .yml, or a YAML string + :param load_single: Expect only a single YAML resource, fail otherwise + :return: Either a single YAML dictionary or a list of YAML dictionaries + """ + if ( + isinstance(yml, str) + and yml.count("\n") == 0 + and (".yaml" in yml.lower() or ".yml" in yml.lower()) + ): + with open(yml, "r") as f: + yml_content = f.read() + + elif isinstance(yml, str) and "kind" in yml.lower(): + yml_content = yml + else: + raise Exception( + f"Invalid YAML provided. Please provide either a file path or YAML string: ${yml}" + ) + + yaml_strings = yml_content.strip("---").split("---") + + # Return a single resource dict + if load_single: + if len(yaml_strings) > 1: + raise Exception( + f"More than one YAML file is being loaded when only a single file is supported: ${yaml_strings}" + ) + return yaml_to_dict(yaml_strings[0]) + + # Return a list of resource dicts + resources = [] + for yaml_string in yaml_strings: + resources.append(yaml_to_dict(yaml_string)) + return resources + + +def yaml_to_dict(yaml_string): + yaml_dict = yaml.safe_load(yaml_string) + if not isinstance(yaml_dict, dict) or not "kind" in yaml_dict: + raise Exception(f"Could not detect YAML kind from resource: ${yaml_string}") + return yaml_dict diff --git a/sdk/python/feast/resource.py b/sdk/python/feast/resource.py new file mode 100644 index 00000000000..5eb7f515e63 --- /dev/null +++ b/sdk/python/feast/resource.py @@ -0,0 +1,10 @@ +from feast.feature_set import FeatureSet + + +class ResourceFactory: + @staticmethod + def get_resource(kind): + if kind == "feature_set": + return FeatureSet + else: + raise ValueError(kind) diff --git a/sdk/python/feast/sdk/client.py b/sdk/python/feast/sdk/client.py deleted file mode 100644 index 79db6575ff8..00000000000 --- a/sdk/python/feast/sdk/client.py +++ /dev/null @@ -1,471 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Main interface for users to interact with the Core API. 
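The new yaml_loader and ResourceFactory, together with FeatureSet.from_dict, replace part of what the legacy client below used to do. A hedged sketch of loading a feature set definition; the YAML keys simply mirror FeatureSetSpec proto field names and are an assumption, not a documented schema.

```python
# Sketch only; the YAML schema shown here is assumed, not specified by this PR.
from feast.loaders.yaml import yaml_loader
from feast.resource import ResourceFactory

yml = """
kind: feature_set
name: customer_transactions
entities:
  - name: customer_id
    valueType: INT64
features:
  - name: daily_transactions
    valueType: FLOAT
"""

resource_dict = yaml_loader(yml, load_single=True)
feature_set_cls = ResourceFactory.get_resource(resource_dict["kind"])
fs = feature_set_cls.from_dict(resource_dict)  # parsed via json_format.ParseDict
```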
-""" - -import os -from datetime import datetime - -import grpc -import pandas as pd -from google.protobuf.timestamp_pb2 import Timestamp - -from feast.core.CoreService_pb2_grpc import CoreServiceStub -from feast.core.JobService_pb2 import JobServiceTypes -from feast.core.JobService_pb2_grpc import JobServiceStub -from feast.core.DatasetService_pb2 import DatasetServiceTypes -from feast.core.DatasetService_pb2_grpc import DatasetServiceStub -from feast.sdk.env import FEAST_CORE_URL_ENV_KEY, FEAST_SERVING_URL_ENV_KEY -from feast.sdk.resources.entity import Entity -from feast.sdk.resources.feature import Feature -from feast.sdk.resources.feature_group import FeatureGroup -from feast.sdk.resources.feature_set import DatasetInfo, FileType -from feast.sdk.resources.storage import Storage -from feast.sdk.utils.bq_util import TableDownloader -from feast.sdk.utils.print_utils import spec_to_yaml -from feast.sdk.utils import types -from feast.serving.Serving_pb2 import QueryFeaturesRequest -from feast.serving.Serving_pb2_grpc import ServingAPIStub - - -class Client: - def __init__(self, core_url=None, serving_url=None, verbose=False): - """Create an instance of Feast client which is connected to feast - endpoint specified in the parameter. If no url is provided, the - client will default to the url specified in the environment variable - FEAST_CORE_URL. - - Args: - core_url (str, optional): feast's grpc endpoint URL - (e.g.: "my.feast.com:8433") - serving_url (str, optional): feast serving's grpc endpoint URL - (e.g.: "my.feast.com:8433") - """ - - if core_url is None: - core_url = os.getenv(FEAST_CORE_URL_ENV_KEY) - self._core_url = core_url - - if serving_url is None: - serving_url = os.getenv(FEAST_SERVING_URL_ENV_KEY) - self._serving_url = serving_url - - self.__core_channel = None - self.__serving_channel = None - self._core_service_stub = None - self._job_service_stub = None - self._dataset_service_stub = None - self._serving_service_stub = None - - self._verbose = verbose - self._table_downloader = TableDownloader() - - @property - def core_url(self): - if self._core_url is None: - self._core_url = os.getenv(FEAST_CORE_URL_ENV_KEY) - if self._core_url is None: - raise ValueError( - "Core API URL not set. Either set the " - + "environment variable {} or set it explicitly.".format( - FEAST_CORE_URL_ENV_KEY - ) - ) - return self._core_url - - @core_url.setter - def core_url(self, value): - self._core_url = value - - @property - def serving_url(self): - if self._serving_url is None: - self._serving_url = os.getenv(FEAST_SERVING_URL_ENV_KEY) - if self._serving_url is None: - raise ValueError( - "Serving API URL not set. Either set the " - + "environment variable {} or set it explicitly.".format( - FEAST_SERVING_URL_ENV_KEY - ) - ) - return self._serving_url - - @serving_url.setter - def serving_url(self, value): - self._serving_url = value - - @property - def verbose(self): - return self._verbose - - @verbose.setter - def verbose(self, val): - if not isinstance(val, bool): - raise TypeError("verbose should be a boolean value") - self._verbose = val - - def apply(self, obj): - """Create or update one or many feast's resource - (feature, entity, importer, storage). 
- - Args: - obj (object): one or many feast's resource - // create_entity (bool, optional): (default: {None}) - // create_features (bool, optional): [description] (default: {None}) - """ - if isinstance(obj, list): - ids = [] - for resource in obj: - ids.append(self._apply(resource)) - return ids - else: - return self._apply(obj) - - def run( - self, importer, name_override=None, apply_entity=False, apply_features=False - ): - """ - Run an import job - Args: - importer (feast.sdk.importer.Importer): importer instance - name_override (str, optional): Job name override - apply_entity (bool, optional): (default: False) create/update - entity inside importer - apply_features (bool, optional): (default: False) create/update - features inside importer - - Returns: - (str) job ID of the import job - """ - request = JobServiceTypes.SubmitImportJobRequest(importSpec=importer.spec) - if name_override is not None: - request.name = name_override - - if apply_entity: - self._apply_entity(importer.entity) - if apply_features: - for feature in importer.features: - self._apply_feature(importer.features[feature]) - - if importer.require_staging: - print("Staging file to remote path {}".format(importer.remote_path)) - importer.stage() - print("Submitting job with spec:\n {}".format(spec_to_yaml(importer.spec))) - self._connect_core() - response = self._job_service_stub.SubmitJob(request) - print("Submitted job with id: {}".format(response.jobId)) - return response.jobId - - def create_dataset( - self, feature_set, start_date, end_date, limit=None, - name_prefix=None, filters=None - ): - """ - Create training dataset for a feature set. The training dataset - will be bounded by event timestamp between start_date and end_date. - Specify limit to limit number of row returned. The training dataset - will reside in a bigquery table specified by destination. - - Args: - feature_set (feast.sdk.resources.feature_set.FeatureSet): - feature set representing the data wanted - start_date (str): starting date of the training data in ISO 8601 - format (e.g.: "2018-12-31") - end_date (str): end date of training data in ISO 8601 format (e.g.: - "2018-12-31") - limit (int, optional): (default: None) maximum number of row - returned - name_prefix (str, optional): (default: None) name prefix. - filters (dict, optional): (default: None) conditional clause - that will be used to filter dataset. Keys of filters could be - feature id or job_id. - :return: - feast.resources.feature_set.DatasetInfo: DatasetInfo containing - the information of training dataset. 
- """ - self._check_create_dataset_args(feature_set, start_date, end_date, - limit, filters) - - conv_filters = None - if filters is not None: - conv_filters = {} - for k, v in filters.items(): - conv_filters[str(k)] = str(v) - - req = DatasetServiceTypes.CreateDatasetRequest( - featureSet=feature_set.proto, - startDate=_timestamp_from_datetime(_parse_date(start_date)), - endDate=_timestamp_from_datetime(_parse_date(end_date)), - limit=limit, - namePrefix=name_prefix, - filters=conv_filters - ) - if self.verbose: - print( - "creating training dataset for features: " + str(feature_set.features) - ) - self._connect_core() - resp = self._dataset_service_stub.CreateDataset(req) - - if self.verbose: - print( - "created dataset {}: {}".format( - resp.datasetInfo.name, resp.datasetInfo.tableUrl - ) - ) - return DatasetInfo(resp.datasetInfo.name, resp.datasetInfo.tableUrl) - - def get_serving_data(self, feature_set, entity_keys, ts_range=None): - """Get feature value from feast serving API. - - If server_url is not provided, the value stored in the environment variable - FEAST_SERVING_URL is used to connect to the serving server instead. - - Args: - feature_set (feast.sdk.resources.feature_set.FeatureSet): feature set - representing the data wanted - entity_keys (:obj: `list` of :obj: `str): list of entity keys - ts_range (:obj: `list` of str, optional): size 2 list of start - and end time, in datetime type. It will - filter out any feature value having event timestamp outside - of the ts_range. - - Returns: - pandas.DataFrame: DataFrame of results - """ - start = None - end = None - if ts_range is not None: - if len(ts_range) != 2: - raise ValueError("ts_range must have len 2") - start = ts_range[0] - end = ts_range[1] - if type(start) is not datetime or type(end) is not datetime: - raise TypeError("start and end must be datetime type") - - request = self._build_serving_request(feature_set, entity_keys) - self._connect_serving() - return self._response_to_df( - feature_set, self._serving_service_stub.QueryFeatures(request), start, end - ) - - def download_dataset( - self, dataset_info, dest, staging_location, file_type=FileType.CSV - ): - """ - Download training dataset as file - Args: - dataset_info (feast.sdk.resources.feature_set.DatasetInfo) : - dataset_info to be downloaded - dest (str): destination's file path - staging_location (str): url to staging_location (currently - support a folder in GCS) - file_type (feast.sdk.resources.feature_set.FileType): (default: - FileType.CSV) exported file format - Returns: - str: path to the downloaded file - """ - return self._table_downloader.download_table_as_file( - dataset_info.full_table_id, dest, staging_location, file_type - ) - - def download_dataset_to_df(self, dataset_info, staging_location): - """ - Download training dataset as Pandas Dataframe - Args: - dataset_info (feast.sdk.resources.feature_set.DatasetInfo) : - dataset_info to be downloaded - staging_location: url to staging_location (currently - support a folder in GCS) - - Returns: pandas.DataFrame: dataframe of the training dataset - - """ - return self._table_downloader.download_table_as_df( - dataset_info.full_table_id, staging_location - ) - - def close(self): - """ - Close underlying connection to Feast's core and serving end points. 
- """ - self.__core_channel.close() - self.__core_channel = None - self.__serving_channel.close() - self.__serving_channel = None - - def _connect_core(self): - """Connect to core api""" - if self.__core_channel is None: - self.__core_channel = grpc.insecure_channel(self.core_url) - self._core_service_stub = CoreServiceStub(self.__core_channel) - self._job_service_stub = JobServiceStub(self.__core_channel) - self._dataset_service_stub = DatasetServiceStub(self.__core_channel) - - def _connect_serving(self): - """Connect to serving api""" - if self.__serving_channel is None: - self.__serving_channel = grpc.insecure_channel(self.serving_url) - self._serving_service_stub = ServingAPIStub(self.__serving_channel) - - def _build_serving_request(self, feature_set, entity_keys): - """Helper function to build serving service request.""" - return QueryFeaturesRequest( - entityName=feature_set.entity, - entityId=entity_keys, - featureId=feature_set.features, - ) - - def _response_to_df(self, feature_set, response, start=None, end=None): - is_filter_time = start is not None and end is not None - df = pd.DataFrame(columns=[feature_set.entity] + feature_set.features) - dtypes = {} - for entity_id in response.entities: - feature_map = response.entities[entity_id].features - row = {response.entityName: entity_id} - for feature_id in feature_map: - v = feature_map[feature_id].value - if is_filter_time: - ts = feature_map[feature_id].timestamp.ToDatetime() - if ts < start or ts > end: - continue - feast_valuetype = v.WhichOneof("val") - if feast_valuetype not in dtypes: - dtypes[feature_id] = types.FEAST_VALUETYPE_TO_DTYPE[feast_valuetype] - v = getattr(v, v.WhichOneof("val")) - row[feature_id] = v - df = df.append(row, ignore_index=True) - return df.astype(dtypes).reset_index(drop=True) - - def _apply(self, obj): - """Applies a single object to feast core. 
- - Args: - obj (object): one of - [Feature, Entity, FeatureGroup, Storage, Importer] - """ - if isinstance(obj, Feature): - return self._apply_feature(obj) - elif isinstance(obj, Entity): - return self._apply_entity(obj) - elif isinstance(obj, FeatureGroup): - return self._apply_feature_group(obj) - elif isinstance(obj, Storage): - return self._apply_storage(obj) - else: - raise TypeError( - "Apply can only be passed one of the following \ - types: [Feature, Entity, FeatureGroup, Storage, Importer]" - ) - - def _apply_feature(self, feature): - """Apply the feature to the core API - - Args: - feature (feast.sdk.resources.feature.Feature): feature to apply - """ - self._connect_core() - response = self._core_service_stub.ApplyFeature(feature.spec) - if self.verbose: - print( - "Successfully applied feature with id: {}\n---\n{}".format( - response.featureId, feature - ) - ) - return response.featureId - - def _apply_entity(self, entity): - """Apply the entity to the core API - - Args: - entity (feast.sdk.resources.entity.Entity): entity to apply - """ - self._connect_core() - response = self._core_service_stub.ApplyEntity(entity.spec) - if self.verbose: - print( - "Successfully applied entity with name: {}\n---\n{}".format( - response.entityName, entity - ) - ) - return response.entityName - - def _apply_feature_group(self, feature_group): - """Apply the feature group to the core API - - Args: - feature_group (feast.sdk.resources.feature_group.FeatureGroup): - feature group to apply - """ - self._connect_core() - response = self._core_service_stub.ApplyFeatureGroup(feature_group.spec) - if self.verbose: - print( - "Successfully applied feature group with id: " - + "{}\n---\n{}".format(response.featureGroupId, feature_group) - ) - return response.featureGroupId - - def _apply_storage(self, storage): - """Apply the storage to the core API - - Args: - storage (feast.sdk.resources.storage.Storage): storage to apply - """ - self._connect_core() - response = self._core_service_stub.ApplyStorage(storage.spec) - if self.verbose: - print( - "Successfully applied storage with id: " - + "{}\n{}".format(response.storageId, storage) - ) - return response.storageId - - def _check_create_dataset_args(self, feature_set, start_date, end_date, - limit, filters): - if len(feature_set.features) < 1: - raise ValueError("feature set is empty") - - start = _parse_date(start_date) - end = _parse_date(end_date) - if end < start: - raise ValueError("end_date is before start_date") - - if limit is not None and limit < 1: - raise ValueError("limit is not a positive integer") - - if filters is not None and not isinstance(filters, dict): - raise ValueError("filters is not dictionary") - - -def _parse_date(date): - try: - return datetime.strptime(date, "%Y-%m-%d") - except ValueError: - raise ValueError("Incorrect date format, should be YYYY-MM-DD") - - -def _timestamp_from_datetime(dt): - """Convert datetime to protobuf timestamp - - Args: - dt (datetime.datetime): datetime in datetime format - - Returns: - google.protobuf.timestamp_pb2.Timestamp: timestamp in protobuf format - """ - ts = Timestamp() - ts.FromDatetime(dt) - return ts diff --git a/sdk/python/feast/sdk/importer.py b/sdk/python/feast/sdk/importer.py deleted file mode 100644 index 0a86f481203..00000000000 --- a/sdk/python/feast/sdk/importer.py +++ /dev/null @@ -1,482 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import pandas as pd -import ntpath -import time -import datetime -from feast.specs.ImportSpec_pb2 import ImportSpec, Schema -from feast.sdk.utils.gs_utils import gcs_to_df, is_gs_path, df_to_gcs -from feast.sdk.utils.print_utils import spec_to_yaml -from feast.sdk.utils.types import dtype_to_value_type -from feast.sdk.utils.bq_util import head -from feast.sdk.resources.feature import Feature -from feast.sdk.resources.entity import Entity - -from google.protobuf.timestamp_pb2 import Timestamp -from google.cloud import bigquery - - -class Importer: - def __init__(self, specs, df, properties): - self._properties = properties - self._specs = specs - self.df = df - - @property - def source(self): - """str: source of the data""" - return self._properties.get("source") - - @property - def size(self): - """str: number of rows in the data""" - return self._properties.get("size") - - @property - def require_staging(self): - """bool: whether the data needs to be staged""" - return self._properties.get("require_staging") - - @property - def remote_path(self): - """str: remote path of the file""" - return self._properties.get("remote_path") - - @property - def spec(self): - """feast.specs.ImportSpec_pb2.ImportSpec: - import spec for this dataset""" - return self._specs.get("import") - - @property - def features(self): - """list[feast.specs.FeatureSpec_pb2.FeatureSpec]: - list of features associated with this dataset""" - return self._specs.get("features") - - @property - def entity(self): - """feast.specs.EntitySpec_pb2.EntitySpec: - entity associated with this dataset""" - return self._specs.get("entity") - - @classmethod - def from_csv(cls, - path, - entity, - owner, - staging_location=None, - id_column=None, - feature_columns=None, - timestamp_column=None, - timestamp_value=None, - serving_store=None, - warehouse_store=None, - job_options={}): - """Creates an importer from a given csv dataset. - This file can be either local or remote (in gcs). If it's a local file - then staging_location must be determined. - - Args: - path (str): path to csv file - entity (str): entity id - owner (str): owner - staging_location (str, optional): Defaults to None. Staging location - for ingesting a local csv file. - id_column (str, optional): Defaults to None. Id column in the csv. - If not set, will default to the `entity` argument. - feature_columns ([str], optional): Defaults to None. Feature columns - to ingest. If not set, the importer will by default ingest all - available columns. - timestamp_column (str, optional): Defaults to None. Timestamp - column in the csv. If not set, defaults to timestamp value. - timestamp_value (datetime, optional): Defaults to current datetime. - Timestamp value to assign to all features in the dataset. - serving_store (feast.sdk.resources.feature.DataStore): Defaults to None. - Serving store to write the features in this instance to. - warehouse_store (feast.sdk.resources.feature.DataStore): Defaults to None. - Warehouse store to write the features in this instance to. - job_options (dict): Defaults to empty dict. Additional job options. 
- - Returns: - Importer: the importer for the dataset provided. - """ - src_type = "file.csv" - source_options = {} - source_options["path"], require_staging = \ - _get_remote_location(path, staging_location) - if is_gs_path(path): - df = gcs_to_df(path) - else: - df = pd.read_csv(path) - schema, features = \ - _detect_schema_and_feature(entity, owner, id_column, - feature_columns, timestamp_column, - timestamp_value, serving_store, - warehouse_store, df) - iport_spec = _create_import(src_type, source_options, job_options, - entity, schema) - - props = (_properties(src_type, len(df.index), require_staging, - source_options["path"])) - specs = _specs(iport_spec, Entity(name=entity), features) - - return cls(specs, df, props) - - @classmethod - def from_bq(cls, - bq_path, - entity, - owner, - limit=10, - id_column=None, - feature_columns=None, - timestamp_column=None, - timestamp_value=None, - serving_store=None, - warehouse_store=None, - job_options={}): - """Creates an importer from a given bigquery table. - - Args: - bq_path (str): path to bigquery table, in the format - project.dataset.table - entity (str): entity id - owner (str): owner - limit (int, optional): Defaults to 10. The maximum number of rows to - read into the importer df. - id_column (str, optional): Defaults to None. Id column in the csv. - If not set, will default to the `entity` argument. - feature_columns ([str], optional): Defaults to None. Feature columns - to ingest. If not set, the importer will by default ingest all - available columns. - timestamp_column (str, optional): Defaults to None. Timestamp - column in the csv. If not set, defaults to timestamp value. - timestamp_value (datetime, optional): Defaults to current datetime. - Timestamp value to assign to all features in the dataset. - serving_store (feast.sdk.resources.feature.DataStore): Defaults to None. - Serving store to write the features in this instance to. - warehouse_store (feast.sdk.resources.feature.DataStore): Defaults to None. - Warehouse store to write the features in this instance to. - job_options (dict): Defaults to empty dict. Additional job options. - - Returns: - Importer: the importer for the dataset provided. - """ - - cli = bigquery.Client() - project, dataset_id, table_id = bq_path.split(".") - dataset_ref = cli.dataset(dataset_id, project=project) - table_ref = dataset_ref.table(table_id) - table = cli.get_table(table_ref) - - source_options = { - "project": project, - "dataset": dataset_id, - "table": table_id - } - df = head(cli, table, limit) - schema, features = \ - _detect_schema_and_feature(entity, owner, id_column, - feature_columns, timestamp_column, - timestamp_value, serving_store, - warehouse_store, df) - iport_spec = _create_import("bigquery", source_options, job_options, - entity, schema) - - props = _properties("bigquery", table.num_rows, False, None) - specs = _specs(iport_spec, Entity(name=entity), features) - return cls(specs, df, props) - - @classmethod - def from_df(cls, - df, - entity, - owner, - staging_location, - id_column=None, - feature_columns=None, - timestamp_column=None, - timestamp_value=None, - serving_store=None, - warehouse_store=None, - job_options={}): - """Creates an importer from a given pandas dataframe. - To import a file from a dataframe, the data will have to be staged. - - Args: - path (str): path to csv file - entity (str): entity id - owner (str): owner - staging_location (str): Defaults to None. Staging location - for ingesting a local csv file. - id_column (str, optional): Defaults to None. 
Id column in the csv. - If not set, will default to the `entity` argument. - feature_columns ([str], optional): Defaults to None. Feature columns - to ingest. If not set, the importer will by default ingest all - available columns. - timestamp_column (str, optional): Defaults to None. Timestamp - column in the csv. If not set, defaults to timestamp value. - timestamp_value (datetime, optional): Defaults to current datetime. - Timestamp value to assign to all features in the dataset. - serving_store (feast.sdk.resources.feature.DataStore): Defaults to None. - Serving store to write the features in this instance to. - warehouse_store (feast.sdk.resources.feature.DataStore): Defaults to None. - Warehouse store to write the features in this instance to. - job_options (dict): Defaults to empty dict. Additional job options. - - Returns: - Importer: the importer for the dataset provided. - """ - tmp_file_name = ("tmp_{}_{}.csv".format(entity, - int(round( - time.time() * 1000)))) - src_type = "file.csv" - source_options = {} - source_options["path"], require_staging = (_get_remote_location( - tmp_file_name, staging_location)) - schema, features = \ - _detect_schema_and_feature(entity, owner, id_column, - feature_columns, timestamp_column, - timestamp_value, serving_store, - warehouse_store, df) - iport_spec = _create_import(src_type, source_options, job_options, - entity, schema) - - props = _properties("dataframe", len(df.index), require_staging, - source_options["path"]) - specs = _specs(iport_spec, Entity(name=entity), features) - - return cls(specs, df, props) - - def stage(self): - """Stage the data to its remote location - """ - if not self.require_staging: - return - ts_col = self.spec.schema.timestampColumn - if ts_col != "": - _convert_timestamp(self.df, ts_col) - df_to_gcs(self.df, self.remote_path) - - def describe(self): - """Print out the import spec. 
- """ - print(spec_to_yaml(self.spec)) - - def dump(self, path): - """Dump the import spec to the provided path - - Arguments: - path (str): path to dump the spec to - """ - - with open(path, 'w') as f: - f.write(spec_to_yaml(self.spec)) - print("Saved spec to {}".format(path)) - - -def _convert_timestamp(df, timestamp_col): - """Converts the given df's timestamp column to ISO8601 format - """ - df[timestamp_col] = pd.to_datetime(df[timestamp_col]).dt \ - .strftime("%Y-%m-%dT%H:%M:%S%zZ") - - -def _properties(source, size, require_staging, remote): - """Args: - source (str): source of the data - size (int): number of rows of the dataset - require_staging (bool): whether the file requires staging - remote (str): remote path - - Returns: - dict: set of importer properties - """ - - return { - "source": source, - "size": size, - "require_staging": require_staging, - "remote_path": remote - } - - -def _specs(iport, entity, features): - """Args: - iport {} -- [description] - entity {[type]} -- [description] - features {[type]} -- [description] - - Returns: - [type] -- [description] - """ - - return {"import": iport, "features": features, "entity": entity} - - -def _get_remote_location(path, staging_location): - """Get the remote location of the file - - Args: - path (str): raw path of the file - staging_location (str): path to stage the file - - """ - if is_gs_path(path): - return path, False - - if staging_location is None: - raise ValueError( - "Specify staging_location for importing local file/dataframe") - if not is_gs_path(staging_location): - raise ValueError("Staging location must be in GCS") - - filename = ntpath.basename(path) - return staging_location + "/" + filename, True - - -def _detect_schema_and_feature(entity, owner, id_column, feature_columns, - timestamp_column, timestamp_value, - serving_store, warehouse_store, df): - """Create schema object for import spec. - - Args: - entity (str): entity name - id_column (str): column name of entity id - timestamp_column (str): column name of timestamp - timestamp_value (datetime.datetime): timestamp to apply to all - rows in dataset - feature_columns (str): list of column to be extracted - df (pandas.Dataframe): pandas dataframe of the data - serving_store (feast.sdk.resources.feature.DataStore): Defaults to None. - Serving store to write the features in this instance to. - warehouse_store (feast.sdk.resources.feature.DataStore): Defaults to None. - Warehouse store to write the features in this instance to. 
- - Returns: - feast.specs.ImportSpec_pb2.Schema: schema of the data - dict of str: feast.specs.FeatureSpec_pb2.FeatureSpec: features in the data - - Raises: - Exception -- [description] - """ - - schema = Schema() - if id_column is not None: - schema.entityIdColumn = id_column - elif entity in df.columns: - schema.entityIdColumn = entity - else: - raise ValueError("Column with name {} is not found".format(entity)) - - if timestamp_column is not None: - schema.timestampColumn = timestamp_column - else: - if timestamp_value is None: - ts = Timestamp() - ts.GetCurrentTime() - else: - ts = Timestamp( - seconds=int((timestamp_value - - datetime.datetime(1970, 1, 1)).total_seconds())) - schema.timestampValue.CopyFrom(ts) - - features = {} - if feature_columns is not None: - # check if all column exist and create feature accordingly - for column in feature_columns: - if column not in df.columns: - raise ValueError( - "Column with name {} is not found".format(column)) - features[column] = _create_feature(df[column], entity, owner, - serving_store, warehouse_store) - else: - # get all column except entity id and timestampColumn - feature_columns = list(df.columns.values) - _remove_safely(feature_columns, schema.entityIdColumn) - _remove_safely(feature_columns, schema.timestampColumn) - for column in feature_columns: - features[column] = _create_feature(df[column], entity, owner, - serving_store, warehouse_store) - - for col in df.columns: - field = schema.fields.add() - field.name = col - if col in features: - field.featureId = features[col].id - - features_dict = {} - for k in features: - features_dict[features[k].id] = features[k] - - return schema, features_dict - - -def _create_feature(column, entity, owner, serving_store, warehouse_store): - """Create Feature object. - - Args: - column (pandas.Series): data column - entity (str): entity name - owner (str): owner of the feature - serving_store (feast.sdk.resources.feature.DataStore): Defaults to None. - Serving store to write the features in this instance to. - warehouse_store (feast.sdk.resources.feature.DataStore): Defaults to None. - Warehouse store to write the features in this instance to. - - Returns: - feast.sdk.resources.Feature: feature for this data column - """ - - feature = Feature( - name=column.name, - entity=entity, - owner=owner, - value_type=dtype_to_value_type(column.dtype)) - if serving_store is not None: - feature.serving_store = serving_store - if warehouse_store is not None: - feature.warehouse_store = warehouse_store - return feature - - -def _create_import(import_type, source_options, job_options, entity, schema): - """Create an import spec. 
- - Args: - import_type (str): import type - source_options (dict): import spec source options - jobOptions (dict): import spec job options - entity (str): entity - schema (feast.specs.ImportSpec_pb2.Schema): schema of the file - - Returns: - feast.specs.ImportSpec_pb2.ImportSpec: import spec - """ - - return ImportSpec( - type=import_type, - sourceOptions=source_options, - jobOptions=job_options, - entities=[entity], - schema=schema) - - -def _remove_safely(columns, column): - try: - columns.remove(column) - except ValueError: - pass diff --git a/sdk/python/feast/sdk/resources/entity.py b/sdk/python/feast/sdk/resources/entity.py deleted file mode 100644 index ad9b566c8e5..00000000000 --- a/sdk/python/feast/sdk/resources/entity.py +++ /dev/null @@ -1,111 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import json -import yaml -from feast.specs.EntitySpec_pb2 import EntitySpec -from feast.sdk.utils.print_utils import spec_to_yaml -from google.protobuf.json_format import Parse - - -class Entity: - """ - Wrapper class for feast entity - """ - - def __init__(self, name="", description="", tags=[]): - """ - Create Entity instance. - - Args: - name (str): name of entity - description (str): description of entity - tags (list[str], optional): defaults to []. - list of tags for this entity - """ - self.__spec = EntitySpec(name=name, description=description, tags=tags) - - @property - def spec(self): - return self.__spec - - @property - def name(self): - return self.__spec.name - - @name.setter - def name(self, value): - self.__spec.name = value - - @property - def description(self): - return self.__spec.description - - @description.setter - def description(self, value): - self.__spec.description = value - - @property - def tags(self): - return self.__spec.tags - - @tags.setter - def tags(self, value): - del self.__spec.tags[:] - self.__spec.tags.extend(value) - - @classmethod - def from_yaml(cls, path): - """Create an instance of entity from a yaml file - - Args: - path (str): path to yaml file - """ - with open(path, 'r') as file: - content = yaml.safe_load(file.read()) - entity = cls() - entity.__spec = Parse( - json.dumps(content), EntitySpec(), ignore_unknown_fields=False) - return entity - - def create_feature(self, name, value_type, owner, description): - """Create a feature related to this entity - - Args: - name (str): feature name - value_type (feast.types.ValueType_pb2.ValueType): value type of - the feature - owner (str): owner of the feature - description (str): feature's description - """ - pass - - def __str__(self): - """Print the feature in yaml format - - Returns: - str: yaml formatted representation of the entity - """ - return spec_to_yaml(self.__spec) - - def dump(self, path): - """Dump the feature into a yaml file. - It will replace content of an existing file. 
- - Args: - path (str): destination file path - """ - with open(path, 'w') as file: - file.write(str(self)) - print("Saved spec to {}".format(path)) diff --git a/sdk/python/feast/sdk/resources/feature.py b/sdk/python/feast/sdk/resources/feature.py deleted file mode 100644 index 714a1404511..00000000000 --- a/sdk/python/feast/sdk/resources/feature.py +++ /dev/null @@ -1,257 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import enum -import json - -import yaml -from google.protobuf.json_format import Parse - -from feast.sdk.utils.print_utils import spec_to_yaml -from feast.specs.FeatureSpec_pb2 import FeatureSpec, DataStores, DataStore - - -class ValueType(enum.Enum): - """ - Type of the feature's value - """ - UNKNOWN = 0 - BYTES = 1 - STRING = 2 - INT32 = 3 - INT64 = 4 - DOUBLE = 5 - FLOAT = 6 - BOOL = 7 - TIMESTAMP = 8 - - -class Feature: - """ - Wrapper class for feast feature - """ - - def __init__(self, - name='', - entity='', - owner='', - value_type=ValueType.DOUBLE, - description='', - uri='', - warehouse_store=None, - serving_store=None, - group='', - tags=[], - options={}): - """Create feast feature instance. - - Args: - name (str): name of feature, in lower snake case - entity (str): entity the feature belongs to, in lower case - owner (str): owner of the feature - value_type (feast.sdk.resources.feature.ValueType): defaults to - ValueType.DOUBLE. value type of the feature - description (str): defaults to "". description of the feature - uri (str): defaults to "". 
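The `Entity` wrapper deleted just above serializes its spec to and from YAML; a small round-trip sketch under the same legacy-SDK assumption (the file path is illustrative):

# Legacy SDK import; this module is removed by this change.
from feast.sdk.resources.entity import Entity

driver = Entity(name="driver", description="Driver entity", tags=["prod"])
driver.dump("driver_entity.yaml")             # writes the spec as YAML

same_driver = Entity.from_yaml("driver_entity.yaml")
assert same_driver.name == "driver"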
uri pointing to the source code or - origin of this feature - warehouse_store (feast.sdk.resources.feature.Datastore): - warehouse store id and options - serving_store (feast.sdk.resources.feature.Datastore): serving - store id and options - group (str, optional): feature group to inherit from - tags (list[str], optional): tags assigned to the feature - options (dic, optional): additional options for the feature - """ - id = '{}.{}'.format(entity, name).lower() - warehouse_store_spec = None - serving_store_spec = None - if serving_store is not None: - serving_store_spec = serving_store.spec - if warehouse_store is not None: - warehouse_store_spec = warehouse_store.spec - data_stores = DataStores( - serving=serving_store_spec, warehouse=warehouse_store_spec) - self.__spec = FeatureSpec( - id=id, - name=name, - entity=entity, - owner=owner, - dataStores=data_stores, - description=description, - uri=uri, - valueType=value_type.value, - group=group, - tags=tags, - options=options) - - @property - def spec(self): - return self.__spec - - @property - def id(self): - return self.__spec.id - - @property - def name(self): - return self.__spec.name - - @name.setter - def name(self, value): - self.__spec.name = value - id_split = self.id.split('.') - id_split[1] = value - self.__spec.id = '.'.join(id_split) - - @property - def entity(self): - return self.__spec.entity - - @entity.setter - def entity(self, value): - self.__spec.entity = value - id_split = self.id.split('.') - id_split[0] = value - self.__spec.id = '.'.join(id_split) - - @property - def owner(self): - return self.__spec.owner - - @owner.setter - def owner(self, value): - self.__spec.owner = value - - @property - def warehouse_store(self): - return self.__spec.dataStores.warehouse - - @warehouse_store.setter - def warehouse_store(self, value): - """Set warehouse store from given Datastore""" - self.__spec.dataStores.warehouse.CopyFrom(value.spec) - - @property - def serving_store(self): - return self.__spec.dataStores.serving - - @serving_store.setter - def serving_store(self, value): - """Set serving store from given Datastore""" - self.__spec.dataStores.serving.CopyFrom(value.spec) - - @property - def description(self): - return self.__spec.description - - @description.setter - def description(self, value): - self.__spec.description = value - - @property - def uri(self): - return self.__spec.uri - - @uri.setter - def uri(self, value): - self.__spec.uri = value - - @property - def value_type(self): - return ValueType(self.__spec.valueType) - - @value_type.setter - def value_type(self, value): - self.__spec.valueType = value - - @property - def group(self): - return self.__spec.group - - @group.setter - def group(self, value): - self.__spec.group = value - - @property - def tags(self): - return self.__spec.tags - - @tags.setter - def tags(self, value): - del self.__spec.tags[:] - self.__spec.tags.extend(value) - - @property - def options(self): - return self.__spec.options - - @options.setter - def options(self, value): - for key in self.__spec.options: - del self.__spec.options[key] - for (key, value) in value.items(): - self.__spec.options[key] = value - - @classmethod - def from_yaml(cls, path): - """Create an instance of feature from a yaml spec file - - Args: - path (str): path to yaml spec file - """ - - with open(path, 'r') as file: - content = yaml.safe_load(file.read()) - feature = cls() - feature.__spec = Parse( - json.dumps(content), - FeatureSpec(), - ignore_unknown_fields=False) - return feature - - def __str__(self): - 
"""Print the feature in yaml format - - Returns: - str: yaml formatted representation of the entity - """ - return spec_to_yaml(self.__spec) - - def dump(self, path): - """Dump the feature into a yaml file. - It will replace content of an existing file. - - Args: - path (str): destination file path - """ - with open(path, 'w') as file: - file.write(str(self)) - print("Saved spec to {}".format(path)) - - -class Datastore: - def __init__(self, id, options={}): - self.__spec = DataStore(id=id, options=options) - - def __str__(self): - """Print the datastore in yaml format - - Returns: - str: yaml formatted representation of the Datastore - """ - return spec_to_yaml(self.__spec) - - @property - def spec(self): - return self.__spec diff --git a/sdk/python/feast/sdk/resources/feature_group.py b/sdk/python/feast/sdk/resources/feature_group.py deleted file mode 100644 index 3f73c571ca4..00000000000 --- a/sdk/python/feast/sdk/resources/feature_group.py +++ /dev/null @@ -1,122 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import yaml -import json - -from feast.specs.FeatureSpec_pb2 import DataStores -from feast.specs.FeatureGroupSpec_pb2 import FeatureGroupSpec -from feast.sdk.utils.print_utils import spec_to_yaml -from google.protobuf.json_format import Parse - - -class FeatureGroup(): - """ - Wrapper class for feast feature group - """ - - def __init__(self, id, tags=[], warehouse_store=None, serving_store=None): - """Create FeatureGroup instance. - - Args: - id (str): id of feature group - tags (list): Defaults to []. tags assigned to feature group - as well as all children features. 
- warehouse_store (feast.sdk.resources.feature.Datastore): - warehouse store id and options - serving_store (feast.sdk.resources.feature.Datastore): - serving store id and options - """ - warehouse_store_spec = None - serving_store_spec = None - if (serving_store is not None): - serving_store_spec = serving_store.spec - if (warehouse_store is not None): - warehouse_store_spec = warehouse_store.spec - data_stores = DataStores( - serving=serving_store_spec, warehouse=warehouse_store_spec) - self.__spec = FeatureGroupSpec( - id=id, tags=tags, dataStores=data_stores) - - @property - def spec(self): - return self.__spec - - @property - def id(self): - return self.__spec.id - - @id.setter - def id(self, value): - self.__spec.id = value - - @property - def warehouse_store(self): - return self.__spec.dataStores.warehouse - - @warehouse_store.setter - def warehouse_store(self, value): - self.__spec.dataStores.serving.CopyFrom(value) - - @property - def serving_store(self): - return self.__spec.dataStores.serving - - @serving_store.setter - def serving_store(self, value): - self.__spec.dataStores.warehouse.CopyFrom(value) - - @property - def tags(self): - return self.__spec.tags - - @tags.setter - def tags(self, value): - del self.__spec.tags[:] - self.__spec.tags.extend(value) - - @classmethod - def from_yaml(cls, path): - """Create an instance of feature group from a yaml spec file - - Args: - path (str): path to yaml spec file - """ - with open(path, 'r') as file: - content = yaml.safe_load(file.read()) - feature_group = cls.__new__(cls) - feature_group.__spec = Parse( - json.dumps(content), - FeatureGroupSpec(), - ignore_unknown_fields=False) - return feature_group - - def __str__(self): - """Return string representation of the feature group - - Returns: - str: yaml formatted representation of the entity - """ - return spec_to_yaml(self.__spec) - - def dump(self, path): - """Dump the feature group into a yaml file. - It will replace content of an existing file. - - Args: - path (str): destination file path - """ - with open(path, 'w') as file: - file.write(str(self)) - print("Saved spec to {}".format(path)) diff --git a/sdk/python/feast/sdk/resources/feature_set.py b/sdk/python/feast/sdk/resources/feature_set.py deleted file mode 100644 index 7eb2870078e..00000000000 --- a/sdk/python/feast/sdk/resources/feature_set.py +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from feast.core.DatasetService_pb2 import FeatureSet as FeatureSet_pb - - -class FeatureSet: - """ - Represent a collection of features having same entity. 
- """ - - def __init__(self, entity, features): - self._ensure_same_entity(entity, features) - - self._features = features - self._entity = entity - self._proto = FeatureSet_pb(entityName=entity, featureIds=features) - - @property - def features(self): - """ - Return list of feature ID of this feature set - Returns: list of feature ID in this feature set - - """ - return self._features - - @property - def entity(self): - return self._entity - - @property - def proto(self): - return self._proto - - def _ensure_same_entity(self, entity, features): - for feature in features: - e = feature.split(".")[0] - if e != entity: - raise ValueError("feature set has different entity: " + e) - - -class FileType(object): - """ - File type for downloading training dataset as file - """ - CSV = "CSV" - """CSV file format""" - - JSON = "NEWLINE_DELIMITED_JSON" - """Newline delimited JSON file format""" - - AVRO = "AVRO" - """Avro file format""" - - -class DatasetInfo: - def __init__(self, name, full_table_id): - """ - Create instance of DatasetInfo with a BigQuery table as its - backing store. - Args: - name: (str) dataset name - full_table_id: (str) fully qualified table id - """ - self._name = name - self._full_table_id = full_table_id - - @property - def name(self): - """ - Dataset name - Returns: dataset name - - """ - return self._name - - @property - def full_table_id(self): - """ - - Returns: fully qualified table id - - """ - return self._full_table_id diff --git a/sdk/python/feast/sdk/resources/storage.py b/sdk/python/feast/sdk/resources/storage.py deleted file mode 100644 index 601aad52061..00000000000 --- a/sdk/python/feast/sdk/resources/storage.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import yaml -import json - -from feast.specs.StorageSpec_pb2 import StorageSpec -from feast.sdk.utils.print_utils import spec_to_yaml -from google.protobuf.json_format import Parse - - -class Storage: - """ - Wrapper class for feast storage - """ - - def __init__(self, id="", type="", options={}): - """Create Storage instance. 
- - Args: - id (str): storage id - type (str): storage type - options (dict, optional) : map of storage options - """ - self.__spec = StorageSpec(id=id, type=type, options=options) - - @property - def spec(self): - return self.__spec - - @property - def id(self): - return self.__spec.id - - @id.setter - def id(self, value): - self.__spec.name = value - - @property - def type(self): - return self.__spec.type - - @type.setter - def type(self, value): - self.__spec.type = value - - @property - def options(self): - return self.__spec.options - - @options.setter - def options(self, value): - self.__spec.options.clear() - self.__spec.options.update(value) - - @classmethod - def from_yaml(cls, path): - """Create an instance of storage from a yaml file - - Args: - path (str): path to yaml file - """ - with open(path, 'r') as file: - content = yaml.safe_load(file.read()) - storage = cls() - storage.__spec = Parse( - json.dumps(content), - StorageSpec(), - ignore_unknown_fields=False) - return storage - - def __str__(self): - """Return string representation the storage in yaml format - - Returns: - str: yaml formatted representation of the entity - """ - return spec_to_yaml(self.__spec) - - def dump(self, path): - """Dump the storage into a yaml file. - It will replace content of an existing file. - - Args: - path (str): destination file path - """ - with open(path, 'w') as file: - file.write(str(self)) - print("Saved spec to {}".format(path)) diff --git a/sdk/python/feast/sdk/utils/__init__.py b/sdk/python/feast/sdk/utils/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/sdk/python/feast/sdk/utils/bq_util.py b/sdk/python/feast/sdk/utils/bq_util.py deleted file mode 100644 index ebc4f243163..00000000000 --- a/sdk/python/feast/sdk/utils/bq_util.py +++ /dev/null @@ -1,258 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import tempfile -import time -from datetime import datetime -import pytz -import fastavro -import pandas as pd -from google.cloud import bigquery -from google.cloud.bigquery.client import Client as BQClient -from google.cloud.bigquery.job import ExtractJobConfig, DestinationFormat -from google.cloud.bigquery.table import Table -from google.cloud.exceptions import NotFound -from google.cloud.storage import Client as GCSClient -from google.cloud import storage - -from feast.sdk.utils.gs_utils import is_gs_path, split_gs_path, gcs_to_df - - -def head(client, table, max_rows=10): - """Get the head of the table. Retrieves rows from the given table at - minimum cost - - Args: - client (google.cloud.bigquery.client.Client): bigquery client - table (google.cloud.bigquery.table.Table): bigquery table to get the - head of - max_rows (int, optional): Defaults to 10. 
maximum number of rows to - retrieve - - Returns: - pandas.DataFrame: dataframe containing the head of rows - """ - - rows = client.list_rows(table, max_results=max_rows) - rows = [x for x in rows] - return pd.DataFrame( - data=[list(x.values()) for x in rows], columns=list(rows[0].keys()) - ) - - -def get_table_name(feature_id, storage_spec): - """ - Get fully qualified BigQuery table name from a feature ID and its - storage spec - Args: - feature_id(str): ID of a feature - storage_spec(feast.specs.StorageSpec_pb2.StorageSpec): storage spec of - the feature - - Returns: - str: fully qualified table name of the feature. - - """ - if "bigquery" != storage_spec.type: - raise ValueError("storage spec is not BigQuery storage spec") - - try: - project = storage_spec.options["project"] - dataset = storage_spec.options["dataset"] - except KeyError: - raise ValueError("storage spec has empty project or dataset option") - - table_name = feature_id.split(".")[0] - return ".".join([project, dataset, table_name]) - - -def get_default_templocation(bigquery_client, project=None): - if project is None: - project = bigquery_client.project - assert isinstance(project, str) - assert len(project) > 0 - storage_client = storage.Client() - default_bucket_name = f"feast-templocation-{project}" - try: - storage_client.get_bucket(default_bucket_name) - except NotFound: - print( - f'Default bucket "{default_bucket_name}" not found. Attempting to create it.' - ) - storage_client.create_bucket(bucket_name=default_bucket_name, project=project) - return f"gs://{default_bucket_name}" - - -def query_to_dataframe( - query: str, - bigquery_client: bigquery.Client = None, - storage_client: storage.Client = None, - project: str = None, - templocation: str = None, -) -> pd.DataFrame: - """ - Run a query job on BigQuery and return the result in Pandas DataFrame format - - Args: - query: BigQuery query e.g. 
"SELECT * FROM dataset.table" - bigquery_client: - storage_client: - project: Google Cloud project id - templocation: Google Cloud Storage location to store intermediate files, must start with "gs://" - - Returns: Pandas DataFrame of the query result - - """ - if isinstance(templocation, str) and not templocation.startswith("gs://"): - raise RuntimeError('templocation must start with "gs://"') - - if bigquery_client is None: - bigquery_client = bigquery.Client(project=project) - - if project is None: - project = bigquery_client.project - - query_job = bigquery_client.query(query, project=project) - query_job_state = "" - - while not query_job.done(): - if query_job.state != query_job_state: - print(f"Query status: {query_job.state}") - query_job_state = query_job.state - time.sleep(5) - - if query_job.state != query_job_state: - print(f"Query status: {query_job.state}") - - if query_job.exception(): - raise query_job.exception() - - if not templocation: - templocation = get_default_templocation(bigquery_client, project=project) - - if templocation.endswith("/"): - templocation += templocation[:-1] - - destination_uri = ( - f"{templocation}/bq-{datetime.now(pytz.utc).strftime('%Y%m%dT%H%M%SZ')}.avro" - ) - extract_job_config = bigquery.job.ExtractJobConfig(destination_format="AVRO") - extract_job = bigquery_client.extract_table( - query_job.destination, destination_uri, job_config=extract_job_config - ) - - while not extract_job.done(): - time.sleep(5) - - if extract_job.exception(): - raise extract_job.exception() - - if not storage_client: - storage_client = storage.Client(project=project) - - print("Reading query result into DataFrame") - - bucket_name, blob_name = ( - destination_uri.split("/")[2], - "/".join(destination_uri.split("/")[3:]), - ) - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.get_blob(blob_name) - downloaded_avro_filename = tempfile.NamedTemporaryFile().name - blob.download_to_filename(downloaded_avro_filename) - - with open(downloaded_avro_filename, "rb") as avro_file: - avro_reader = fastavro.reader(avro_file) - df = pd.DataFrame.from_records(avro_reader) - - return df - - -class TableDownloader: - def __init__(self): - self._bq = None - self._gcs = None - - @property - def gcs(self): - if self._gcs is None: - self._gcs = GCSClient() - return self._gcs - - @property - def bq(self): - if self._bq is None: - self._bq = BQClient() - return self._bq - - def download_table_as_file(self, full_table_id, dest, staging_location, file_type): - """ - Download a bigquery table as file - Args: - full_table_id (str): fully qualified BigQuery table id - dest (str): destination filename - staging_location (str): url to staging_location (currently - support a folder in GCS) - file_type (feast.sdk.resources.feature_set.FileType): (default: - FileType.CSV) exported file format - Returns: (str) path to the downloaded file - - """ - if not is_gs_path(staging_location): - raise ValueError("staging_uri must be a directory in GCS") - - temp_file_name = "temp_{}".format(int(round(time.time() * 1000))) - staging_file_path = os.path.join(staging_location, temp_file_name) - - job_config = ExtractJobConfig() - job_config.destination_format = file_type - src_table = Table.from_string(full_table_id) - job = self.bq.extract_table(src_table, staging_file_path, job_config=job_config) - - # await completion - job.result() - - bucket_name, blob_name = split_gs_path(staging_file_path) - bucket = self.gcs.get_bucket(bucket_name) - blob = bucket.blob(blob_name) - 
blob.download_to_filename(dest) - return dest - - def download_table_as_df(self, full_table_id, staging_location): - """ - Download a BigQuery table as Pandas Dataframe - Args: - full_table_id (src) : fully qualified BigQuery table id - staging_location: url to staging_location (currently - support a folder in GCS) - - Returns: pandas.DataFrame: dataframe of the training dataset - - """ - if not is_gs_path(staging_location): - raise ValueError("staging_uri must be a directory in GCS") - - temp_file_name = "temp_{}".format(int(round(time.time() * 1000))) - staging_file_path = os.path.join(staging_location, temp_file_name) - - job_config = ExtractJobConfig() - job_config.destination_format = DestinationFormat.CSV - job = self.bq.extract_table( - Table.from_string(full_table_id), staging_file_path, job_config=job_config - ) - - # await completion - job.result() - return gcs_to_df(staging_file_path) diff --git a/sdk/python/feast/sdk/utils/gs_utils.py b/sdk/python/feast/sdk/utils/gs_utils.py deleted file mode 100644 index 2561a115a84..00000000000 --- a/sdk/python/feast/sdk/utils/gs_utils.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import io -import os -import re -import time - -import pandas as pd -from google.cloud import storage - -_GCS_PATH_REGEX = r'^gs:\/\/[a-z0-9\.\-_\/]*$' - - -def gcs_to_df(path): - """Reads a file from gs to pandas - - Args: - path (str): full gcs path to the file - - Returns: - pandas.DataFrame: dataframe - """ - bucket_name, blob_name = split_gs_path(path) - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(blob_name) - temp_file_path = 'temp{}.csv'.format(int(round(time.time() * 1000))) - with open(temp_file_path, 'wb') as temp_file: - blob.download_to_file(temp_file) - df = pd.read_csv(temp_file_path) - os.remove(temp_file_path) - return df - - -def df_to_gcs(df, path): - """Writes the given df to the path specified. Will fail if the bucket does - not exist. 
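The removed `bq_util.py` above derives a fully qualified BigQuery table name from a feature id plus a BigQuery storage spec (and, in `query_to_dataframe`, exports query results to Avro on GCS before reading them back with fastavro). A minimal sketch of the table-name convention, assuming the legacy `feast.sdk` and generated `feast.specs` modules are still importable; the project and dataset values are illustrative.

from feast.sdk.utils.bq_util import get_table_name
from feast.specs.StorageSpec_pb2 import StorageSpec

warehouse = StorageSpec(
    id="BIGQUERY1",
    type="bigquery",
    options={"project": "my-gcp-project", "dataset": "feast_warehouse"},
)

# The table is named after the entity part of the feature id.
print(get_table_name("driver.completed_trips", warehouse))
# -> "my-gcp-project.feast_warehouse.driver"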
- - Args: - df (pandas.DataFrame): dataframe - path (str): path in gcs to write to - """ - bucket_name, blob_name = split_gs_path(path) - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(blob_name) - s = io.StringIO() - df.to_csv(s, index=False) - blob.upload_from_string(s.getvalue()) - - -def split_gs_path(path): - path = path.replace("gs://", "", 1) - return path.split('/', 1) - - -def is_gs_path(path): - """Check if path is a gcs path - - Args: - path (str): path to file - - Returns: - bool: is a valid gcs path - """ - return re.match(_GCS_PATH_REGEX, path) != None diff --git a/sdk/python/feast/sdk/utils/print_utils.py b/sdk/python/feast/sdk/utils/print_utils.py deleted file mode 100644 index 926e4eeac53..00000000000 --- a/sdk/python/feast/sdk/utils/print_utils.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from collections import OrderedDict - -import yaml -from google.protobuf.json_format import MessageToDict - - -def spec_to_yaml(spec): - '''Converts spec to yaml string - - Args: - spec (google.protobuf.Message): feast spec object - - Returns: - str: yaml string representation of spec - ''' - mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG - yaml.add_representer(OrderedDict, _dict_representer) - yaml.add_constructor(mapping_tag, _dict_constructor) - dic = OrderedDict(MessageToDict(spec)) - return yaml.dump(dic, default_flow_style=False) - - -def _dict_representer(dumper, data): - return dumper.represent_dict(data.items()) - - -def _dict_constructor(loader, node): - return OrderedDict(loader.construct_pairs(node)) diff --git a/sdk/python/feast/sdk/utils/types.py b/sdk/python/feast/sdk/utils/types.py deleted file mode 100644 index 2f79a2119d3..00000000000 --- a/sdk/python/feast/sdk/utils/types.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
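The GCS path helpers in the removed `gs_utils.py` are plain string utilities; a quick sketch of their behavior under the same legacy-SDK assumption (the example path is illustrative):

from feast.sdk.utils.gs_utils import is_gs_path, split_gs_path

path = "gs://feast-staging/exports/driver.csv"

print(is_gs_path(path))             # True: matches the gs:// regex
print(is_gs_path("/tmp/file.csv"))  # False: not a GCS path

bucket, blob = split_gs_path(path)
print(bucket)                       # "feast-staging"
print(blob)                         # "exports/driver.csv"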
- -from feast.sdk.resources.feature import ValueType -import numpy as np - -# mapping of pandas dtypes to feast value type strings -DTYPE_TO_VALUE_TYPE_MAPPING = { - "float64": ValueType.DOUBLE, - "float32": ValueType.FLOAT, - "int64": ValueType.INT64, - "uint64": ValueType.INT64, - "int32": ValueType.INT32, - "uint32": ValueType.INT32, - "uint8": ValueType.INT32, - "int8": ValueType.INT32, - "bool": ValueType.BOOL, - "timedelta": ValueType.INT64, - "datetime64[ns]": ValueType.TIMESTAMP, - "datetime64[ns, tz]": ValueType.TIMESTAMP, - "category": ValueType.STRING, - "object": ValueType.STRING -} - -# Mapping of feast value type to Pandas DataFrame dtypes -# Integer and floating values are all 64-bit for better integration -# with BigQuery data types -FEAST_VALUETYPE_TO_DTYPE = { - "bytesVal": np.byte, - "stringVal": np.object, - "int32Val": "Int32", # Use pandas nullable int type - "int64Val": "Int64", # Use pandas nullable int type - "doubleVal": np.float64, - "floatVal": np.float64, - "boolVal": np.bool, - "timestampVal": np.datetime64, -} - - -def dtype_to_value_type(dtype): - """Returns the equivalent feast valueType for the given dtype - - Args: - dtype (pandas.dtype): pandas dtype - - Returns: - feast.types.ValueType2.ValueType: equivalent feast valuetype - """ - return DTYPE_TO_VALUE_TYPE_MAPPING[dtype.__str__()] diff --git a/sdk/python/feast/serving/ServingService_pb2.py b/sdk/python/feast/serving/ServingService_pb2.py new file mode 100644 index 00000000000..e7258f5a7d5 --- /dev/null +++ b/sdk/python/feast/serving/ServingService_pb2.py @@ -0,0 +1,971 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/serving/ServingService.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='feast/serving/ServingService.proto', + package='feast.serving', + syntax='proto3', + serialized_options=_b('\n\rfeast.servingB\017ServingAPIProtoZ2github.com/gojek/feast/sdk/go/protos/feast/serving'), + serialized_pb=_b('\n\"feast/serving/ServingService.proto\x12\rfeast.serving\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x17\x66\x65\x61st/types/Value.proto\"\x1c\n\x1aGetFeastServingInfoRequest\"{\n\x1bGetFeastServingInfoResponse\x12\x0f\n\x07version\x18\x01 \x01(\t\x12-\n\x04type\x18\x02 \x01(\x0e\x32\x1f.feast.serving.FeastServingType\x12\x1c\n\x14job_staging_location\x18\n \x01(\t\"u\n\x11\x46\x65\x61tureSetRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x15\n\rfeature_names\x18\x03 \x03(\t\x12*\n\x07max_age\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\"\x93\x03\n\x18GetOnlineFeaturesRequest\x12\x36\n\x0c\x66\x65\x61ture_sets\x18\x01 \x03(\x0b\x32 .feast.serving.FeatureSetRequest\x12\x46\n\x0b\x65ntity_rows\x18\x02 
\x03(\x0b\x32\x31.feast.serving.GetOnlineFeaturesRequest.EntityRow\x12!\n\x19omit_entities_in_response\x18\x03 \x01(\x08\x1a\xd3\x01\n\tEntityRow\x12\x34\n\x10\x65ntity_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12M\n\x06\x66ields\x18\x02 \x03(\x0b\x32=.feast.serving.GetOnlineFeaturesRequest.EntityRow.FieldsEntry\x1a\x41\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.feast.types.Value:\x02\x38\x01\"\x87\x01\n\x17GetBatchFeaturesRequest\x12\x36\n\x0c\x66\x65\x61ture_sets\x18\x01 \x03(\x0b\x32 .feast.serving.FeatureSetRequest\x12\x34\n\x0e\x64\x61taset_source\x18\x02 \x01(\x0b\x32\x1c.feast.serving.DatasetSource\"\x8c\x02\n\x19GetOnlineFeaturesResponse\x12J\n\x0c\x66ield_values\x18\x01 \x03(\x0b\x32\x34.feast.serving.GetOnlineFeaturesResponse.FieldValues\x1a\xa2\x01\n\x0b\x46ieldValues\x12P\n\x06\x66ields\x18\x01 \x03(\x0b\x32@.feast.serving.GetOnlineFeaturesResponse.FieldValues.FieldsEntry\x1a\x41\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.feast.types.Value:\x02\x38\x01\";\n\x18GetBatchFeaturesResponse\x12\x1f\n\x03job\x18\x01 \x01(\x0b\x32\x12.feast.serving.Job\"0\n\rGetJobRequest\x12\x1f\n\x03job\x18\x01 \x01(\x0b\x32\x12.feast.serving.Job\"1\n\x0eGetJobResponse\x12\x1f\n\x03job\x18\x01 \x01(\x0b\x32\x12.feast.serving.Job\"\xb3\x01\n\x03Job\x12\n\n\x02id\x18\x01 \x01(\t\x12$\n\x04type\x18\x02 \x01(\x0e\x32\x16.feast.serving.JobType\x12(\n\x06status\x18\x03 \x01(\x0e\x32\x18.feast.serving.JobStatus\x12\r\n\x05\x65rror\x18\x04 \x01(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12.\n\x0b\x64\x61ta_format\x18\x06 \x01(\x0e\x32\x19.feast.serving.DataFormat\"\xb2\x01\n\rDatasetSource\x12>\n\x0b\x66ile_source\x18\x01 \x01(\x0b\x32\'.feast.serving.DatasetSource.FileSourceH\x00\x1aO\n\nFileSource\x12\x11\n\tfile_uris\x18\x01 \x03(\t\x12.\n\x0b\x64\x61ta_format\x18\x02 \x01(\x0e\x32\x19.feast.serving.DataFormatB\x10\n\x0e\x64\x61taset_source*o\n\x10\x46\x65\x61stServingType\x12\x1e\n\x1a\x46\x45\x41ST_SERVING_TYPE_INVALID\x10\x00\x12\x1d\n\x19\x46\x45\x41ST_SERVING_TYPE_ONLINE\x10\x01\x12\x1c\n\x18\x46\x45\x41ST_SERVING_TYPE_BATCH\x10\x02*6\n\x07JobType\x12\x14\n\x10JOB_TYPE_INVALID\x10\x00\x12\x15\n\x11JOB_TYPE_DOWNLOAD\x10\x01*h\n\tJobStatus\x12\x16\n\x12JOB_STATUS_INVALID\x10\x00\x12\x16\n\x12JOB_STATUS_PENDING\x10\x01\x12\x16\n\x12JOB_STATUS_RUNNING\x10\x02\x12\x13\n\x0fJOB_STATUS_DONE\x10\x03*;\n\nDataFormat\x12\x17\n\x13\x44\x41TA_FORMAT_INVALID\x10\x00\x12\x14\n\x10\x44\x41TA_FORMAT_AVRO\x10\x01\x32\x92\x03\n\x0eServingService\x12l\n\x13GetFeastServingInfo\x12).feast.serving.GetFeastServingInfoRequest\x1a*.feast.serving.GetFeastServingInfoResponse\x12\x66\n\x11GetOnlineFeatures\x12\'.feast.serving.GetOnlineFeaturesRequest\x1a(.feast.serving.GetOnlineFeaturesResponse\x12\x63\n\x10GetBatchFeatures\x12&.feast.serving.GetBatchFeaturesRequest\x1a\'.feast.serving.GetBatchFeaturesResponse\x12\x45\n\x06GetJob\x12\x1c.feast.serving.GetJobRequest\x1a\x1d.feast.serving.GetJobResponseBT\n\rfeast.servingB\x0fServingAPIProtoZ2github.com/gojek/feast/sdk/go/protos/feast/servingb\x06proto3') + , + dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,feast_dot_types_dot_Value__pb2.DESCRIPTOR,]) + +_FEASTSERVINGTYPE = _descriptor.EnumDescriptor( + name='FeastServingType', + full_name='feast.serving.FeastServingType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FEAST_SERVING_TYPE_INVALID', 
index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FEAST_SERVING_TYPE_ONLINE', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FEAST_SERVING_TYPE_BATCH', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=1757, + serialized_end=1868, +) +_sym_db.RegisterEnumDescriptor(_FEASTSERVINGTYPE) + +FeastServingType = enum_type_wrapper.EnumTypeWrapper(_FEASTSERVINGTYPE) +_JOBTYPE = _descriptor.EnumDescriptor( + name='JobType', + full_name='feast.serving.JobType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='JOB_TYPE_INVALID', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='JOB_TYPE_DOWNLOAD', index=1, number=1, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=1870, + serialized_end=1924, +) +_sym_db.RegisterEnumDescriptor(_JOBTYPE) + +JobType = enum_type_wrapper.EnumTypeWrapper(_JOBTYPE) +_JOBSTATUS = _descriptor.EnumDescriptor( + name='JobStatus', + full_name='feast.serving.JobStatus', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='JOB_STATUS_INVALID', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='JOB_STATUS_PENDING', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='JOB_STATUS_RUNNING', index=2, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='JOB_STATUS_DONE', index=3, number=3, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=1926, + serialized_end=2030, +) +_sym_db.RegisterEnumDescriptor(_JOBSTATUS) + +JobStatus = enum_type_wrapper.EnumTypeWrapper(_JOBSTATUS) +_DATAFORMAT = _descriptor.EnumDescriptor( + name='DataFormat', + full_name='feast.serving.DataFormat', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='DATA_FORMAT_INVALID', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DATA_FORMAT_AVRO', index=1, number=1, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=2032, + serialized_end=2091, +) +_sym_db.RegisterEnumDescriptor(_DATAFORMAT) + +DataFormat = enum_type_wrapper.EnumTypeWrapper(_DATAFORMAT) +FEAST_SERVING_TYPE_INVALID = 0 +FEAST_SERVING_TYPE_ONLINE = 1 +FEAST_SERVING_TYPE_BATCH = 2 +JOB_TYPE_INVALID = 0 +JOB_TYPE_DOWNLOAD = 1 +JOB_STATUS_INVALID = 0 +JOB_STATUS_PENDING = 1 +JOB_STATUS_RUNNING = 2 +JOB_STATUS_DONE = 3 +DATA_FORMAT_INVALID = 0 +DATA_FORMAT_AVRO = 1 + + + +_GETFEASTSERVINGINFOREQUEST = _descriptor.Descriptor( + name='GetFeastServingInfoRequest', + full_name='feast.serving.GetFeastServingInfoRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=143, + serialized_end=171, +) + + +_GETFEASTSERVINGINFORESPONSE = _descriptor.Descriptor( + name='GetFeastServingInfoResponse', + full_name='feast.serving.GetFeastServingInfoResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + 
fields=[ + _descriptor.FieldDescriptor( + name='version', full_name='feast.serving.GetFeastServingInfoResponse.version', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='type', full_name='feast.serving.GetFeastServingInfoResponse.type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='job_staging_location', full_name='feast.serving.GetFeastServingInfoResponse.job_staging_location', index=2, + number=10, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=173, + serialized_end=296, +) + + +_FEATURESETREQUEST = _descriptor.Descriptor( + name='FeatureSetRequest', + full_name='feast.serving.FeatureSetRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='feast.serving.FeatureSetRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='version', full_name='feast.serving.FeatureSetRequest.version', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='feature_names', full_name='feast.serving.FeatureSetRequest.feature_names', index=2, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_age', full_name='feast.serving.FeatureSetRequest.max_age', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=298, + serialized_end=415, +) + + +_GETONLINEFEATURESREQUEST_ENTITYROW_FIELDSENTRY = _descriptor.Descriptor( + name='FieldsEntry', + full_name='feast.serving.GetOnlineFeaturesRequest.EntityRow.FieldsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='feast.serving.GetOnlineFeaturesRequest.EntityRow.FieldsEntry.key', 
index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='feast.serving.GetOnlineFeaturesRequest.EntityRow.FieldsEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=_b('8\001'), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=756, + serialized_end=821, +) + +_GETONLINEFEATURESREQUEST_ENTITYROW = _descriptor.Descriptor( + name='EntityRow', + full_name='feast.serving.GetOnlineFeaturesRequest.EntityRow', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='entity_timestamp', full_name='feast.serving.GetOnlineFeaturesRequest.EntityRow.entity_timestamp', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='fields', full_name='feast.serving.GetOnlineFeaturesRequest.EntityRow.fields', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_GETONLINEFEATURESREQUEST_ENTITYROW_FIELDSENTRY, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=610, + serialized_end=821, +) + +_GETONLINEFEATURESREQUEST = _descriptor.Descriptor( + name='GetOnlineFeaturesRequest', + full_name='feast.serving.GetOnlineFeaturesRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='feature_sets', full_name='feast.serving.GetOnlineFeaturesRequest.feature_sets', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='entity_rows', full_name='feast.serving.GetOnlineFeaturesRequest.entity_rows', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='omit_entities_in_response', full_name='feast.serving.GetOnlineFeaturesRequest.omit_entities_in_response', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_GETONLINEFEATURESREQUEST_ENTITYROW, ], + enum_types=[ + ], + 
serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=418, + serialized_end=821, +) + + +_GETBATCHFEATURESREQUEST = _descriptor.Descriptor( + name='GetBatchFeaturesRequest', + full_name='feast.serving.GetBatchFeaturesRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='feature_sets', full_name='feast.serving.GetBatchFeaturesRequest.feature_sets', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='dataset_source', full_name='feast.serving.GetBatchFeaturesRequest.dataset_source', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=824, + serialized_end=959, +) + + +_GETONLINEFEATURESRESPONSE_FIELDVALUES_FIELDSENTRY = _descriptor.Descriptor( + name='FieldsEntry', + full_name='feast.serving.GetOnlineFeaturesResponse.FieldValues.FieldsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='feast.serving.GetOnlineFeaturesResponse.FieldValues.FieldsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='feast.serving.GetOnlineFeaturesResponse.FieldValues.FieldsEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=_b('8\001'), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=756, + serialized_end=821, +) + +_GETONLINEFEATURESRESPONSE_FIELDVALUES = _descriptor.Descriptor( + name='FieldValues', + full_name='feast.serving.GetOnlineFeaturesResponse.FieldValues', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='fields', full_name='feast.serving.GetOnlineFeaturesResponse.FieldValues.fields', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_GETONLINEFEATURESRESPONSE_FIELDVALUES_FIELDSENTRY, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1068, + serialized_end=1230, +) + +_GETONLINEFEATURESRESPONSE = _descriptor.Descriptor( + name='GetOnlineFeaturesResponse', + 
full_name='feast.serving.GetOnlineFeaturesResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='field_values', full_name='feast.serving.GetOnlineFeaturesResponse.field_values', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_GETONLINEFEATURESRESPONSE_FIELDVALUES, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=962, + serialized_end=1230, +) + + +_GETBATCHFEATURESRESPONSE = _descriptor.Descriptor( + name='GetBatchFeaturesResponse', + full_name='feast.serving.GetBatchFeaturesResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='job', full_name='feast.serving.GetBatchFeaturesResponse.job', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1232, + serialized_end=1291, +) + + +_GETJOBREQUEST = _descriptor.Descriptor( + name='GetJobRequest', + full_name='feast.serving.GetJobRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='job', full_name='feast.serving.GetJobRequest.job', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1293, + serialized_end=1341, +) + + +_GETJOBRESPONSE = _descriptor.Descriptor( + name='GetJobResponse', + full_name='feast.serving.GetJobResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='job', full_name='feast.serving.GetJobResponse.job', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1343, + serialized_end=1392, +) + + +_JOB = _descriptor.Descriptor( + name='Job', + full_name='feast.serving.Job', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='feast.serving.Job.id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + 
_descriptor.FieldDescriptor( + name='type', full_name='feast.serving.Job.type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='status', full_name='feast.serving.Job.status', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='error', full_name='feast.serving.Job.error', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='file_uris', full_name='feast.serving.Job.file_uris', index=4, + number=5, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='data_format', full_name='feast.serving.Job.data_format', index=5, + number=6, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1395, + serialized_end=1574, +) + + +_DATASETSOURCE_FILESOURCE = _descriptor.Descriptor( + name='FileSource', + full_name='feast.serving.DatasetSource.FileSource', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='file_uris', full_name='feast.serving.DatasetSource.FileSource.file_uris', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='data_format', full_name='feast.serving.DatasetSource.FileSource.data_format', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1658, + serialized_end=1737, +) + +_DATASETSOURCE = _descriptor.Descriptor( + name='DatasetSource', + full_name='feast.serving.DatasetSource', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='file_source', full_name='feast.serving.DatasetSource.file_source', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, 
file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_DATASETSOURCE_FILESOURCE, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='dataset_source', full_name='feast.serving.DatasetSource.dataset_source', + index=0, containing_type=None, fields=[]), + ], + serialized_start=1577, + serialized_end=1755, +) + +_GETFEASTSERVINGINFORESPONSE.fields_by_name['type'].enum_type = _FEASTSERVINGTYPE +_FEATURESETREQUEST.fields_by_name['max_age'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_GETONLINEFEATURESREQUEST_ENTITYROW_FIELDSENTRY.fields_by_name['value'].message_type = feast_dot_types_dot_Value__pb2._VALUE +_GETONLINEFEATURESREQUEST_ENTITYROW_FIELDSENTRY.containing_type = _GETONLINEFEATURESREQUEST_ENTITYROW +_GETONLINEFEATURESREQUEST_ENTITYROW.fields_by_name['entity_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_GETONLINEFEATURESREQUEST_ENTITYROW.fields_by_name['fields'].message_type = _GETONLINEFEATURESREQUEST_ENTITYROW_FIELDSENTRY +_GETONLINEFEATURESREQUEST_ENTITYROW.containing_type = _GETONLINEFEATURESREQUEST +_GETONLINEFEATURESREQUEST.fields_by_name['feature_sets'].message_type = _FEATURESETREQUEST +_GETONLINEFEATURESREQUEST.fields_by_name['entity_rows'].message_type = _GETONLINEFEATURESREQUEST_ENTITYROW +_GETBATCHFEATURESREQUEST.fields_by_name['feature_sets'].message_type = _FEATURESETREQUEST +_GETBATCHFEATURESREQUEST.fields_by_name['dataset_source'].message_type = _DATASETSOURCE +_GETONLINEFEATURESRESPONSE_FIELDVALUES_FIELDSENTRY.fields_by_name['value'].message_type = feast_dot_types_dot_Value__pb2._VALUE +_GETONLINEFEATURESRESPONSE_FIELDVALUES_FIELDSENTRY.containing_type = _GETONLINEFEATURESRESPONSE_FIELDVALUES +_GETONLINEFEATURESRESPONSE_FIELDVALUES.fields_by_name['fields'].message_type = _GETONLINEFEATURESRESPONSE_FIELDVALUES_FIELDSENTRY +_GETONLINEFEATURESRESPONSE_FIELDVALUES.containing_type = _GETONLINEFEATURESRESPONSE +_GETONLINEFEATURESRESPONSE.fields_by_name['field_values'].message_type = _GETONLINEFEATURESRESPONSE_FIELDVALUES +_GETBATCHFEATURESRESPONSE.fields_by_name['job'].message_type = _JOB +_GETJOBREQUEST.fields_by_name['job'].message_type = _JOB +_GETJOBRESPONSE.fields_by_name['job'].message_type = _JOB +_JOB.fields_by_name['type'].enum_type = _JOBTYPE +_JOB.fields_by_name['status'].enum_type = _JOBSTATUS +_JOB.fields_by_name['data_format'].enum_type = _DATAFORMAT +_DATASETSOURCE_FILESOURCE.fields_by_name['data_format'].enum_type = _DATAFORMAT +_DATASETSOURCE_FILESOURCE.containing_type = _DATASETSOURCE +_DATASETSOURCE.fields_by_name['file_source'].message_type = _DATASETSOURCE_FILESOURCE +_DATASETSOURCE.oneofs_by_name['dataset_source'].fields.append( + _DATASETSOURCE.fields_by_name['file_source']) +_DATASETSOURCE.fields_by_name['file_source'].containing_oneof = _DATASETSOURCE.oneofs_by_name['dataset_source'] +DESCRIPTOR.message_types_by_name['GetFeastServingInfoRequest'] = _GETFEASTSERVINGINFOREQUEST +DESCRIPTOR.message_types_by_name['GetFeastServingInfoResponse'] = _GETFEASTSERVINGINFORESPONSE +DESCRIPTOR.message_types_by_name['FeatureSetRequest'] = _FEATURESETREQUEST +DESCRIPTOR.message_types_by_name['GetOnlineFeaturesRequest'] = _GETONLINEFEATURESREQUEST +DESCRIPTOR.message_types_by_name['GetBatchFeaturesRequest'] = _GETBATCHFEATURESREQUEST +DESCRIPTOR.message_types_by_name['GetOnlineFeaturesResponse'] = _GETONLINEFEATURESRESPONSE +DESCRIPTOR.message_types_by_name['GetBatchFeaturesResponse'] = 
_GETBATCHFEATURESRESPONSE +DESCRIPTOR.message_types_by_name['GetJobRequest'] = _GETJOBREQUEST +DESCRIPTOR.message_types_by_name['GetJobResponse'] = _GETJOBRESPONSE +DESCRIPTOR.message_types_by_name['Job'] = _JOB +DESCRIPTOR.message_types_by_name['DatasetSource'] = _DATASETSOURCE +DESCRIPTOR.enum_types_by_name['FeastServingType'] = _FEASTSERVINGTYPE +DESCRIPTOR.enum_types_by_name['JobType'] = _JOBTYPE +DESCRIPTOR.enum_types_by_name['JobStatus'] = _JOBSTATUS +DESCRIPTOR.enum_types_by_name['DataFormat'] = _DATAFORMAT +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +GetFeastServingInfoRequest = _reflection.GeneratedProtocolMessageType('GetFeastServingInfoRequest', (_message.Message,), { + 'DESCRIPTOR' : _GETFEASTSERVINGINFOREQUEST, + '__module__' : 'feast.serving.ServingService_pb2' + # @@protoc_insertion_point(class_scope:feast.serving.GetFeastServingInfoRequest) + }) +_sym_db.RegisterMessage(GetFeastServingInfoRequest) + +GetFeastServingInfoResponse = _reflection.GeneratedProtocolMessageType('GetFeastServingInfoResponse', (_message.Message,), { + 'DESCRIPTOR' : _GETFEASTSERVINGINFORESPONSE, + '__module__' : 'feast.serving.ServingService_pb2' + # @@protoc_insertion_point(class_scope:feast.serving.GetFeastServingInfoResponse) + }) +_sym_db.RegisterMessage(GetFeastServingInfoResponse) + +FeatureSetRequest = _reflection.GeneratedProtocolMessageType('FeatureSetRequest', (_message.Message,), { + 'DESCRIPTOR' : _FEATURESETREQUEST, + '__module__' : 'feast.serving.ServingService_pb2' + # @@protoc_insertion_point(class_scope:feast.serving.FeatureSetRequest) + }) +_sym_db.RegisterMessage(FeatureSetRequest) + +GetOnlineFeaturesRequest = _reflection.GeneratedProtocolMessageType('GetOnlineFeaturesRequest', (_message.Message,), { + + 'EntityRow' : _reflection.GeneratedProtocolMessageType('EntityRow', (_message.Message,), { + + 'FieldsEntry' : _reflection.GeneratedProtocolMessageType('FieldsEntry', (_message.Message,), { + 'DESCRIPTOR' : _GETONLINEFEATURESREQUEST_ENTITYROW_FIELDSENTRY, + '__module__' : 'feast.serving.ServingService_pb2' + # @@protoc_insertion_point(class_scope:feast.serving.GetOnlineFeaturesRequest.EntityRow.FieldsEntry) + }) + , + 'DESCRIPTOR' : _GETONLINEFEATURESREQUEST_ENTITYROW, + '__module__' : 'feast.serving.ServingService_pb2' + # @@protoc_insertion_point(class_scope:feast.serving.GetOnlineFeaturesRequest.EntityRow) + }) + , + 'DESCRIPTOR' : _GETONLINEFEATURESREQUEST, + '__module__' : 'feast.serving.ServingService_pb2' + # @@protoc_insertion_point(class_scope:feast.serving.GetOnlineFeaturesRequest) + }) +_sym_db.RegisterMessage(GetOnlineFeaturesRequest) +_sym_db.RegisterMessage(GetOnlineFeaturesRequest.EntityRow) +_sym_db.RegisterMessage(GetOnlineFeaturesRequest.EntityRow.FieldsEntry) + +GetBatchFeaturesRequest = _reflection.GeneratedProtocolMessageType('GetBatchFeaturesRequest', (_message.Message,), { + 'DESCRIPTOR' : _GETBATCHFEATURESREQUEST, + '__module__' : 'feast.serving.ServingService_pb2' + # @@protoc_insertion_point(class_scope:feast.serving.GetBatchFeaturesRequest) + }) +_sym_db.RegisterMessage(GetBatchFeaturesRequest) + +GetOnlineFeaturesResponse = _reflection.GeneratedProtocolMessageType('GetOnlineFeaturesResponse', (_message.Message,), { + + 'FieldValues' : _reflection.GeneratedProtocolMessageType('FieldValues', (_message.Message,), { + + 'FieldsEntry' : _reflection.GeneratedProtocolMessageType('FieldsEntry', (_message.Message,), { + 'DESCRIPTOR' : _GETONLINEFEATURESRESPONSE_FIELDVALUES_FIELDSENTRY, + '__module__' : 'feast.serving.ServingService_pb2' + # 
@@protoc_insertion_point(class_scope:feast.serving.GetOnlineFeaturesResponse.FieldValues.FieldsEntry) + }) + , + 'DESCRIPTOR' : _GETONLINEFEATURESRESPONSE_FIELDVALUES, + '__module__' : 'feast.serving.ServingService_pb2' + # @@protoc_insertion_point(class_scope:feast.serving.GetOnlineFeaturesResponse.FieldValues) + }) + , + 'DESCRIPTOR' : _GETONLINEFEATURESRESPONSE, + '__module__' : 'feast.serving.ServingService_pb2' + # @@protoc_insertion_point(class_scope:feast.serving.GetOnlineFeaturesResponse) + }) +_sym_db.RegisterMessage(GetOnlineFeaturesResponse) +_sym_db.RegisterMessage(GetOnlineFeaturesResponse.FieldValues) +_sym_db.RegisterMessage(GetOnlineFeaturesResponse.FieldValues.FieldsEntry) + +GetBatchFeaturesResponse = _reflection.GeneratedProtocolMessageType('GetBatchFeaturesResponse', (_message.Message,), { + 'DESCRIPTOR' : _GETBATCHFEATURESRESPONSE, + '__module__' : 'feast.serving.ServingService_pb2' + # @@protoc_insertion_point(class_scope:feast.serving.GetBatchFeaturesResponse) + }) +_sym_db.RegisterMessage(GetBatchFeaturesResponse) + +GetJobRequest = _reflection.GeneratedProtocolMessageType('GetJobRequest', (_message.Message,), { + 'DESCRIPTOR' : _GETJOBREQUEST, + '__module__' : 'feast.serving.ServingService_pb2' + # @@protoc_insertion_point(class_scope:feast.serving.GetJobRequest) + }) +_sym_db.RegisterMessage(GetJobRequest) + +GetJobResponse = _reflection.GeneratedProtocolMessageType('GetJobResponse', (_message.Message,), { + 'DESCRIPTOR' : _GETJOBRESPONSE, + '__module__' : 'feast.serving.ServingService_pb2' + # @@protoc_insertion_point(class_scope:feast.serving.GetJobResponse) + }) +_sym_db.RegisterMessage(GetJobResponse) + +Job = _reflection.GeneratedProtocolMessageType('Job', (_message.Message,), { + 'DESCRIPTOR' : _JOB, + '__module__' : 'feast.serving.ServingService_pb2' + # @@protoc_insertion_point(class_scope:feast.serving.Job) + }) +_sym_db.RegisterMessage(Job) + +DatasetSource = _reflection.GeneratedProtocolMessageType('DatasetSource', (_message.Message,), { + + 'FileSource' : _reflection.GeneratedProtocolMessageType('FileSource', (_message.Message,), { + 'DESCRIPTOR' : _DATASETSOURCE_FILESOURCE, + '__module__' : 'feast.serving.ServingService_pb2' + # @@protoc_insertion_point(class_scope:feast.serving.DatasetSource.FileSource) + }) + , + 'DESCRIPTOR' : _DATASETSOURCE, + '__module__' : 'feast.serving.ServingService_pb2' + # @@protoc_insertion_point(class_scope:feast.serving.DatasetSource) + }) +_sym_db.RegisterMessage(DatasetSource) +_sym_db.RegisterMessage(DatasetSource.FileSource) + + +DESCRIPTOR._options = None +_GETONLINEFEATURESREQUEST_ENTITYROW_FIELDSENTRY._options = None +_GETONLINEFEATURESRESPONSE_FIELDVALUES_FIELDSENTRY._options = None + +_SERVINGSERVICE = _descriptor.ServiceDescriptor( + name='ServingService', + full_name='feast.serving.ServingService', + file=DESCRIPTOR, + index=0, + serialized_options=None, + serialized_start=2094, + serialized_end=2496, + methods=[ + _descriptor.MethodDescriptor( + name='GetFeastServingInfo', + full_name='feast.serving.ServingService.GetFeastServingInfo', + index=0, + containing_service=None, + input_type=_GETFEASTSERVINGINFOREQUEST, + output_type=_GETFEASTSERVINGINFORESPONSE, + serialized_options=None, + ), + _descriptor.MethodDescriptor( + name='GetOnlineFeatures', + full_name='feast.serving.ServingService.GetOnlineFeatures', + index=1, + containing_service=None, + input_type=_GETONLINEFEATURESREQUEST, + output_type=_GETONLINEFEATURESRESPONSE, + serialized_options=None, + ), + _descriptor.MethodDescriptor( + 
name='GetBatchFeatures', + full_name='feast.serving.ServingService.GetBatchFeatures', + index=2, + containing_service=None, + input_type=_GETBATCHFEATURESREQUEST, + output_type=_GETBATCHFEATURESRESPONSE, + serialized_options=None, + ), + _descriptor.MethodDescriptor( + name='GetJob', + full_name='feast.serving.ServingService.GetJob', + index=3, + containing_service=None, + input_type=_GETJOBREQUEST, + output_type=_GETJOBRESPONSE, + serialized_options=None, + ), +]) +_sym_db.RegisterServiceDescriptor(_SERVINGSERVICE) + +DESCRIPTOR.services_by_name['ServingService'] = _SERVINGSERVICE + +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/serving/ServingService_pb2.pyi b/sdk/python/feast/serving/ServingService_pb2.pyi new file mode 100644 index 00000000000..d03fa6568fa --- /dev/null +++ b/sdk/python/feast/serving/ServingService_pb2.pyi @@ -0,0 +1,465 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from feast.types.Value_pb2 import ( + Value as feast___types___Value_pb2___Value, +) + +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, + EnumDescriptor as google___protobuf___descriptor___EnumDescriptor, +) + +from google.protobuf.duration_pb2 import ( + Duration as google___protobuf___duration_pb2___Duration, +) + +from google.protobuf.internal.containers import ( + RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, + RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from google.protobuf.timestamp_pb2 import ( + Timestamp as google___protobuf___timestamp_pb2___Timestamp, +) + +from typing import ( + Iterable as typing___Iterable, + List as typing___List, + Mapping as typing___Mapping, + MutableMapping as typing___MutableMapping, + Optional as typing___Optional, + Text as typing___Text, + Tuple as typing___Tuple, + cast as typing___cast, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class FeastServingType(int): + DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... + @classmethod + def Name(cls, number: int) -> str: ... + @classmethod + def Value(cls, name: str) -> FeastServingType: ... + @classmethod + def keys(cls) -> typing___List[str]: ... + @classmethod + def values(cls) -> typing___List[FeastServingType]: ... + @classmethod + def items(cls) -> typing___List[typing___Tuple[str, FeastServingType]]: ... + FEAST_SERVING_TYPE_INVALID = typing___cast(FeastServingType, 0) + FEAST_SERVING_TYPE_ONLINE = typing___cast(FeastServingType, 1) + FEAST_SERVING_TYPE_BATCH = typing___cast(FeastServingType, 2) +FEAST_SERVING_TYPE_INVALID = typing___cast(FeastServingType, 0) +FEAST_SERVING_TYPE_ONLINE = typing___cast(FeastServingType, 1) +FEAST_SERVING_TYPE_BATCH = typing___cast(FeastServingType, 2) + +class JobType(int): + DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... + @classmethod + def Name(cls, number: int) -> str: ... + @classmethod + def Value(cls, name: str) -> JobType: ... + @classmethod + def keys(cls) -> typing___List[str]: ... + @classmethod + def values(cls) -> typing___List[JobType]: ... + @classmethod + def items(cls) -> typing___List[typing___Tuple[str, JobType]]: ... 
+ JOB_TYPE_INVALID = typing___cast(JobType, 0) + JOB_TYPE_DOWNLOAD = typing___cast(JobType, 1) +JOB_TYPE_INVALID = typing___cast(JobType, 0) +JOB_TYPE_DOWNLOAD = typing___cast(JobType, 1) + +class JobStatus(int): + DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... + @classmethod + def Name(cls, number: int) -> str: ... + @classmethod + def Value(cls, name: str) -> JobStatus: ... + @classmethod + def keys(cls) -> typing___List[str]: ... + @classmethod + def values(cls) -> typing___List[JobStatus]: ... + @classmethod + def items(cls) -> typing___List[typing___Tuple[str, JobStatus]]: ... + JOB_STATUS_INVALID = typing___cast(JobStatus, 0) + JOB_STATUS_PENDING = typing___cast(JobStatus, 1) + JOB_STATUS_RUNNING = typing___cast(JobStatus, 2) + JOB_STATUS_DONE = typing___cast(JobStatus, 3) +JOB_STATUS_INVALID = typing___cast(JobStatus, 0) +JOB_STATUS_PENDING = typing___cast(JobStatus, 1) +JOB_STATUS_RUNNING = typing___cast(JobStatus, 2) +JOB_STATUS_DONE = typing___cast(JobStatus, 3) + +class DataFormat(int): + DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... + @classmethod + def Name(cls, number: int) -> str: ... + @classmethod + def Value(cls, name: str) -> DataFormat: ... + @classmethod + def keys(cls) -> typing___List[str]: ... + @classmethod + def values(cls) -> typing___List[DataFormat]: ... + @classmethod + def items(cls) -> typing___List[typing___Tuple[str, DataFormat]]: ... + DATA_FORMAT_INVALID = typing___cast(DataFormat, 0) + DATA_FORMAT_AVRO = typing___cast(DataFormat, 1) +DATA_FORMAT_INVALID = typing___cast(DataFormat, 0) +DATA_FORMAT_AVRO = typing___cast(DataFormat, 1) + +class GetFeastServingInfoRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + def __init__(self, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> GetFeastServingInfoRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + +class GetFeastServingInfoResponse(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + version = ... # type: typing___Text + type = ... # type: FeastServingType + job_staging_location = ... # type: typing___Text + + def __init__(self, + *, + version : typing___Optional[typing___Text] = None, + type : typing___Optional[FeastServingType] = None, + job_staging_location : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> GetFeastServingInfoResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"job_staging_location",u"type",u"version"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"job_staging_location",b"job_staging_location",u"type",b"type",u"version",b"version"]) -> None: ... + +class FeatureSetRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + name = ... # type: typing___Text + version = ... # type: int + feature_names = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] + + @property + def max_age(self) -> google___protobuf___duration_pb2___Duration: ... 
+ + def __init__(self, + *, + name : typing___Optional[typing___Text] = None, + version : typing___Optional[int] = None, + feature_names : typing___Optional[typing___Iterable[typing___Text]] = None, + max_age : typing___Optional[google___protobuf___duration_pb2___Duration] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> FeatureSetRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"max_age"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"feature_names",u"max_age",u"name",u"version"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"max_age",b"max_age"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"feature_names",b"feature_names",u"max_age",b"max_age",u"name",b"name",u"version",b"version"]) -> None: ... + +class GetOnlineFeaturesRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + class EntityRow(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + class FieldsEntry(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + key = ... # type: typing___Text + + @property + def value(self) -> feast___types___Value_pb2___Value: ... + + def __init__(self, + *, + key : typing___Optional[typing___Text] = None, + value : typing___Optional[feast___types___Value_pb2___Value] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> GetOnlineFeaturesRequest.EntityRow.FieldsEntry: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"value"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"value",b"value"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"key",b"key",u"value",b"value"]) -> None: ... + + + @property + def entity_timestamp(self) -> google___protobuf___timestamp_pb2___Timestamp: ... + + @property + def fields(self) -> typing___MutableMapping[typing___Text, feast___types___Value_pb2___Value]: ... + + def __init__(self, + *, + entity_timestamp : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, + fields : typing___Optional[typing___Mapping[typing___Text, feast___types___Value_pb2___Value]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> GetOnlineFeaturesRequest.EntityRow: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"entity_timestamp"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"entity_timestamp",u"fields"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"entity_timestamp",b"entity_timestamp"]) -> bool: ... 
+ def ClearField(self, field_name: typing_extensions___Literal[u"entity_timestamp",b"entity_timestamp",u"fields",b"fields"]) -> None: ... + + omit_entities_in_response = ... # type: bool + + @property + def feature_sets(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[FeatureSetRequest]: ... + + @property + def entity_rows(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[GetOnlineFeaturesRequest.EntityRow]: ... + + def __init__(self, + *, + feature_sets : typing___Optional[typing___Iterable[FeatureSetRequest]] = None, + entity_rows : typing___Optional[typing___Iterable[GetOnlineFeaturesRequest.EntityRow]] = None, + omit_entities_in_response : typing___Optional[bool] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> GetOnlineFeaturesRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"entity_rows",u"feature_sets",u"omit_entities_in_response"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"entity_rows",b"entity_rows",u"feature_sets",b"feature_sets",u"omit_entities_in_response",b"omit_entities_in_response"]) -> None: ... + +class GetBatchFeaturesRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + @property + def feature_sets(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[FeatureSetRequest]: ... + + @property + def dataset_source(self) -> DatasetSource: ... + + def __init__(self, + *, + feature_sets : typing___Optional[typing___Iterable[FeatureSetRequest]] = None, + dataset_source : typing___Optional[DatasetSource] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> GetBatchFeaturesRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"dataset_source"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"dataset_source",u"feature_sets"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"dataset_source",b"dataset_source"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"dataset_source",b"dataset_source",u"feature_sets",b"feature_sets"]) -> None: ... + +class GetOnlineFeaturesResponse(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + class FieldValues(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + class FieldsEntry(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + key = ... # type: typing___Text + + @property + def value(self) -> feast___types___Value_pb2___Value: ... + + def __init__(self, + *, + key : typing___Optional[typing___Text] = None, + value : typing___Optional[feast___types___Value_pb2___Value] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> GetOnlineFeaturesResponse.FieldValues.FieldsEntry: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
+ def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"value"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"value",b"value"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"key",b"key",u"value",b"value"]) -> None: ... + + + @property + def fields(self) -> typing___MutableMapping[typing___Text, feast___types___Value_pb2___Value]: ... + + def __init__(self, + *, + fields : typing___Optional[typing___Mapping[typing___Text, feast___types___Value_pb2___Value]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> GetOnlineFeaturesResponse.FieldValues: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"fields"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"fields",b"fields"]) -> None: ... + + + @property + def field_values(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[GetOnlineFeaturesResponse.FieldValues]: ... + + def __init__(self, + *, + field_values : typing___Optional[typing___Iterable[GetOnlineFeaturesResponse.FieldValues]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> GetOnlineFeaturesResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"field_values"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"field_values",b"field_values"]) -> None: ... + +class GetBatchFeaturesResponse(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + @property + def job(self) -> Job: ... + + def __init__(self, + *, + job : typing___Optional[Job] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> GetBatchFeaturesResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"job"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"job"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"job",b"job"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"job",b"job"]) -> None: ... + +class GetJobRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + @property + def job(self) -> Job: ... + + def __init__(self, + *, + job : typing___Optional[Job] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> GetJobRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
+ if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"job"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"job"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"job",b"job"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"job",b"job"]) -> None: ... + +class GetJobResponse(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + @property + def job(self) -> Job: ... + + def __init__(self, + *, + job : typing___Optional[Job] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> GetJobResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"job"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"job"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"job",b"job"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"job",b"job"]) -> None: ... + +class Job(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + id = ... # type: typing___Text + type = ... # type: JobType + status = ... # type: JobStatus + error = ... # type: typing___Text + file_uris = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] + data_format = ... # type: DataFormat + + def __init__(self, + *, + id : typing___Optional[typing___Text] = None, + type : typing___Optional[JobType] = None, + status : typing___Optional[JobStatus] = None, + error : typing___Optional[typing___Text] = None, + file_uris : typing___Optional[typing___Iterable[typing___Text]] = None, + data_format : typing___Optional[DataFormat] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Job: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"data_format",u"error",u"file_uris",u"id",u"status",u"type"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"data_format",b"data_format",u"error",b"error",u"file_uris",b"file_uris",u"id",b"id",u"status",b"status",u"type",b"type"]) -> None: ... + +class DatasetSource(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + class FileSource(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + file_uris = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] + data_format = ... # type: DataFormat + + def __init__(self, + *, + file_uris : typing___Optional[typing___Iterable[typing___Text]] = None, + data_format : typing___Optional[DataFormat] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> DatasetSource.FileSource: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
+ if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"data_format",u"file_uris"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"data_format",b"data_format",u"file_uris",b"file_uris"]) -> None: ... + + + @property + def file_source(self) -> DatasetSource.FileSource: ... + + def __init__(self, + *, + file_source : typing___Optional[DatasetSource.FileSource] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> DatasetSource: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"dataset_source",u"file_source"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"dataset_source",u"file_source"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"dataset_source",b"dataset_source",u"file_source",b"file_source"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"dataset_source",b"dataset_source",u"file_source",b"file_source"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions___Literal[u"dataset_source",b"dataset_source"]) -> typing_extensions___Literal["file_source"]: ... diff --git a/sdk/python/feast/serving/ServingService_pb2_grpc.py b/sdk/python/feast/serving/ServingService_pb2_grpc.py new file mode 100644 index 00000000000..c73f9c744a6 --- /dev/null +++ b/sdk/python/feast/serving/ServingService_pb2_grpc.py @@ -0,0 +1,104 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +from feast.serving import ServingService_pb2 as feast_dot_serving_dot_ServingService__pb2 + + +class ServingServiceStub(object): + # missing associated documentation comment in .proto file + pass + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.GetFeastServingInfo = channel.unary_unary( + '/feast.serving.ServingService/GetFeastServingInfo', + request_serializer=feast_dot_serving_dot_ServingService__pb2.GetFeastServingInfoRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_ServingService__pb2.GetFeastServingInfoResponse.FromString, + ) + self.GetOnlineFeatures = channel.unary_unary( + '/feast.serving.ServingService/GetOnlineFeatures', + request_serializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesResponse.FromString, + ) + self.GetBatchFeatures = channel.unary_unary( + '/feast.serving.ServingService/GetBatchFeatures', + request_serializer=feast_dot_serving_dot_ServingService__pb2.GetBatchFeaturesRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_ServingService__pb2.GetBatchFeaturesResponse.FromString, + ) + self.GetJob = channel.unary_unary( + '/feast.serving.ServingService/GetJob', + request_serializer=feast_dot_serving_dot_ServingService__pb2.GetJobRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_ServingService__pb2.GetJobResponse.FromString, + ) + + +class ServingServiceServicer(object): + # missing associated documentation comment in .proto file + pass + + def GetFeastServingInfo(self, request, context): + """Get information about this Feast serving. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetOnlineFeatures(self, request, context): + """Get online features synchronously. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetBatchFeatures(self, request, context): + """Get batch features asynchronously. + + The client should check the status of the returned job periodically by + calling ReloadJob to determine if the job has completed successfully + or with an error. If the job completes successfully i.e. + status = JOB_STATUS_DONE with no error, then the client can check + the file_uris for the location to download feature values data. + The client is assumed to have access to these file URIs. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetJob(self, request, context): + """Get the latest job status for batch feature retrieval. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_ServingServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetFeastServingInfo': grpc.unary_unary_rpc_method_handler( + servicer.GetFeastServingInfo, + request_deserializer=feast_dot_serving_dot_ServingService__pb2.GetFeastServingInfoRequest.FromString, + response_serializer=feast_dot_serving_dot_ServingService__pb2.GetFeastServingInfoResponse.SerializeToString, + ), + 'GetOnlineFeatures': grpc.unary_unary_rpc_method_handler( + servicer.GetOnlineFeatures, + request_deserializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesRequest.FromString, + response_serializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesResponse.SerializeToString, + ), + 'GetBatchFeatures': grpc.unary_unary_rpc_method_handler( + servicer.GetBatchFeatures, + request_deserializer=feast_dot_serving_dot_ServingService__pb2.GetBatchFeaturesRequest.FromString, + response_serializer=feast_dot_serving_dot_ServingService__pb2.GetBatchFeaturesResponse.SerializeToString, + ), + 'GetJob': grpc.unary_unary_rpc_method_handler( + servicer.GetJob, + request_deserializer=feast_dot_serving_dot_ServingService__pb2.GetJobRequest.FromString, + response_serializer=feast_dot_serving_dot_ServingService__pb2.GetJobResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'feast.serving.ServingService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/sdk/python/feast/serving/Serving_pb2.py b/sdk/python/feast/serving/Serving_pb2.py deleted file mode 100644 index 4f7900e2c3e..00000000000 --- a/sdk/python/feast/serving/Serving_pb2.py +++ /dev/null @@ -1,342 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: feast/serving/Serving.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='feast/serving/Serving.proto', - package='feast.serving', - syntax='proto3', - serialized_options=_b('\n\rfeast.servingB\017ServingAPIProtoZ8github.com/gojek/feast/protos/generated/go/feast/serving'), - serialized_pb=_b('\n\x1b\x66\x65\x61st/serving/Serving.proto\x12\rfeast.serving\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17\x66\x65\x61st/types/Value.proto\"O\n\x14QueryFeaturesRequest\x12\x12\n\nentityName\x18\x01 \x01(\t\x12\x10\n\x08\x65ntityId\x18\x02 \x03(\t\x12\x11\n\tfeatureId\x18\x03 \x03(\t\"\xb9\x01\n\x15QueryFeaturesResponse\x12\x12\n\nentityName\x18\x01 \x01(\t\x12\x44\n\x08\x65ntities\x18\x02 \x03(\x0b\x32\x32.feast.serving.QueryFeaturesResponse.EntitiesEntry\x1a\x46\n\rEntitiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12$\n\x05value\x18\x02 \x01(\x0b\x32\x15.feast.serving.Entity:\x02\x38\x01\"\x8d\x01\n\x06\x45ntity\x12\x35\n\x08\x66\x65\x61tures\x18\x01 \x03(\x0b\x32#.feast.serving.Entity.FeaturesEntry\x1aL\n\rFeaturesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12*\n\x05value\x18\x02 \x01(\x0b\x32\x1b.feast.serving.FeatureValue:\x02\x38\x01\"`\n\x0c\x46\x65\x61tureValue\x12!\n\x05value\x18\x01 \x01(\x0b\x32\x12.feast.types.Value\x12-\n\ttimestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp2j\n\nServingAPI\x12\\\n\rQueryFeatures\x12#.feast.serving.QueryFeaturesRequest\x1a$.feast.serving.QueryFeaturesResponse\"\x00\x42Z\n\rfeast.servingB\x0fServingAPIProtoZ8github.com/gojek/feast/protos/generated/go/feast/servingb\x06proto3') - , - dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,feast_dot_types_dot_Value__pb2.DESCRIPTOR,]) - - - - -_QUERYFEATURESREQUEST = _descriptor.Descriptor( - name='QueryFeaturesRequest', - full_name='feast.serving.QueryFeaturesRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='entityName', full_name='feast.serving.QueryFeaturesRequest.entityName', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='entityId', full_name='feast.serving.QueryFeaturesRequest.entityId', index=1, - number=2, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='featureId', full_name='feast.serving.QueryFeaturesRequest.featureId', index=2, - number=3, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - 
enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=104, - serialized_end=183, -) - - -_QUERYFEATURESRESPONSE_ENTITIESENTRY = _descriptor.Descriptor( - name='EntitiesEntry', - full_name='feast.serving.QueryFeaturesResponse.EntitiesEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='feast.serving.QueryFeaturesResponse.EntitiesEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='feast.serving.QueryFeaturesResponse.EntitiesEntry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=_b('8\001'), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=301, - serialized_end=371, -) - -_QUERYFEATURESRESPONSE = _descriptor.Descriptor( - name='QueryFeaturesResponse', - full_name='feast.serving.QueryFeaturesResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='entityName', full_name='feast.serving.QueryFeaturesResponse.entityName', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='entities', full_name='feast.serving.QueryFeaturesResponse.entities', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_QUERYFEATURESRESPONSE_ENTITIESENTRY, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=186, - serialized_end=371, -) - - -_ENTITY_FEATURESENTRY = _descriptor.Descriptor( - name='FeaturesEntry', - full_name='feast.serving.Entity.FeaturesEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='feast.serving.Entity.FeaturesEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='feast.serving.Entity.FeaturesEntry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - 
serialized_options=_b('8\001'), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=439, - serialized_end=515, -) - -_ENTITY = _descriptor.Descriptor( - name='Entity', - full_name='feast.serving.Entity', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='features', full_name='feast.serving.Entity.features', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_ENTITY_FEATURESENTRY, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=374, - serialized_end=515, -) - - -_FEATUREVALUE = _descriptor.Descriptor( - name='FeatureValue', - full_name='feast.serving.FeatureValue', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='value', full_name='feast.serving.FeatureValue.value', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='timestamp', full_name='feast.serving.FeatureValue.timestamp', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=517, - serialized_end=613, -) - -_QUERYFEATURESRESPONSE_ENTITIESENTRY.fields_by_name['value'].message_type = _ENTITY -_QUERYFEATURESRESPONSE_ENTITIESENTRY.containing_type = _QUERYFEATURESRESPONSE -_QUERYFEATURESRESPONSE.fields_by_name['entities'].message_type = _QUERYFEATURESRESPONSE_ENTITIESENTRY -_ENTITY_FEATURESENTRY.fields_by_name['value'].message_type = _FEATUREVALUE -_ENTITY_FEATURESENTRY.containing_type = _ENTITY -_ENTITY.fields_by_name['features'].message_type = _ENTITY_FEATURESENTRY -_FEATUREVALUE.fields_by_name['value'].message_type = feast_dot_types_dot_Value__pb2._VALUE -_FEATUREVALUE.fields_by_name['timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -DESCRIPTOR.message_types_by_name['QueryFeaturesRequest'] = _QUERYFEATURESREQUEST -DESCRIPTOR.message_types_by_name['QueryFeaturesResponse'] = _QUERYFEATURESRESPONSE -DESCRIPTOR.message_types_by_name['Entity'] = _ENTITY -DESCRIPTOR.message_types_by_name['FeatureValue'] = _FEATUREVALUE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -QueryFeaturesRequest = _reflection.GeneratedProtocolMessageType('QueryFeaturesRequest', (_message.Message,), dict( - DESCRIPTOR = _QUERYFEATURESREQUEST, - __module__ = 'feast.serving.Serving_pb2' - # @@protoc_insertion_point(class_scope:feast.serving.QueryFeaturesRequest) - )) -_sym_db.RegisterMessage(QueryFeaturesRequest) - -QueryFeaturesResponse = _reflection.GeneratedProtocolMessageType('QueryFeaturesResponse', (_message.Message,), dict( - - EntitiesEntry = _reflection.GeneratedProtocolMessageType('EntitiesEntry', (_message.Message,), dict( - 
DESCRIPTOR = _QUERYFEATURESRESPONSE_ENTITIESENTRY, - __module__ = 'feast.serving.Serving_pb2' - # @@protoc_insertion_point(class_scope:feast.serving.QueryFeaturesResponse.EntitiesEntry) - )) - , - DESCRIPTOR = _QUERYFEATURESRESPONSE, - __module__ = 'feast.serving.Serving_pb2' - # @@protoc_insertion_point(class_scope:feast.serving.QueryFeaturesResponse) - )) -_sym_db.RegisterMessage(QueryFeaturesResponse) -_sym_db.RegisterMessage(QueryFeaturesResponse.EntitiesEntry) - -Entity = _reflection.GeneratedProtocolMessageType('Entity', (_message.Message,), dict( - - FeaturesEntry = _reflection.GeneratedProtocolMessageType('FeaturesEntry', (_message.Message,), dict( - DESCRIPTOR = _ENTITY_FEATURESENTRY, - __module__ = 'feast.serving.Serving_pb2' - # @@protoc_insertion_point(class_scope:feast.serving.Entity.FeaturesEntry) - )) - , - DESCRIPTOR = _ENTITY, - __module__ = 'feast.serving.Serving_pb2' - # @@protoc_insertion_point(class_scope:feast.serving.Entity) - )) -_sym_db.RegisterMessage(Entity) -_sym_db.RegisterMessage(Entity.FeaturesEntry) - -FeatureValue = _reflection.GeneratedProtocolMessageType('FeatureValue', (_message.Message,), dict( - DESCRIPTOR = _FEATUREVALUE, - __module__ = 'feast.serving.Serving_pb2' - # @@protoc_insertion_point(class_scope:feast.serving.FeatureValue) - )) -_sym_db.RegisterMessage(FeatureValue) - - -DESCRIPTOR._options = None -_QUERYFEATURESRESPONSE_ENTITIESENTRY._options = None -_ENTITY_FEATURESENTRY._options = None - -_SERVINGAPI = _descriptor.ServiceDescriptor( - name='ServingAPI', - full_name='feast.serving.ServingAPI', - file=DESCRIPTOR, - index=0, - serialized_options=None, - serialized_start=615, - serialized_end=721, - methods=[ - _descriptor.MethodDescriptor( - name='QueryFeatures', - full_name='feast.serving.ServingAPI.QueryFeatures', - index=0, - containing_service=None, - input_type=_QUERYFEATURESREQUEST, - output_type=_QUERYFEATURESRESPONSE, - serialized_options=None, - ), -]) -_sym_db.RegisterServiceDescriptor(_SERVINGAPI) - -DESCRIPTOR.services_by_name['ServingAPI'] = _SERVINGAPI - -# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/serving/Serving_pb2_grpc.py b/sdk/python/feast/serving/Serving_pb2_grpc.py deleted file mode 100644 index d3a8da4b4d5..00000000000 --- a/sdk/python/feast/serving/Serving_pb2_grpc.py +++ /dev/null @@ -1,46 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from feast.serving import Serving_pb2 as feast_dot_serving_dot_Serving__pb2 - - -class ServingAPIStub(object): - # missing associated documentation comment in .proto file - pass - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.QueryFeatures = channel.unary_unary( - '/feast.serving.ServingAPI/QueryFeatures', - request_serializer=feast_dot_serving_dot_Serving__pb2.QueryFeaturesRequest.SerializeToString, - response_deserializer=feast_dot_serving_dot_Serving__pb2.QueryFeaturesResponse.FromString, - ) - - -class ServingAPIServicer(object): - # missing associated documentation comment in .proto file - pass - - def QueryFeatures(self, request, context): - """Query features from Feast serving storage - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_ServingAPIServicer_to_server(servicer, server): - rpc_method_handlers = { - 'QueryFeatures': grpc.unary_unary_rpc_method_handler( - servicer.QueryFeatures, - request_deserializer=feast_dot_serving_dot_Serving__pb2.QueryFeaturesRequest.FromString, - response_serializer=feast_dot_serving_dot_Serving__pb2.QueryFeaturesResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'feast.serving.ServingAPI', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/sdk/python/feast/source.py b/sdk/python/feast/source.py new file mode 100644 index 00000000000..f1f9b1a4c43 --- /dev/null +++ b/sdk/python/feast/source.py @@ -0,0 +1,76 @@ +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from feast.core.Source_pb2 import ( + Source as SourceProto, + KafkaSourceConfig as KafkaSourceConfigProto, + SourceType as SourceTypeProto, +) + + +class Source: + def __eq__(self, other): + return True + + @property + def source_type(self) -> str: + return "None" + + def to_proto(self): + return None + + @classmethod + def from_proto(cls, source_proto: SourceProto): + if source_proto.type == SourceTypeProto.KAFKA: + return KafkaSource( + brokers=source_proto.kafka_source_config.bootstrap_servers, + topic=source_proto.kafka_source_config.topic, + ) + + return cls() + + +class KafkaSource(Source): + def __init__(self, brokers: str = "", topic: str = ""): + self._source_type = "Kafka" + self._brokers = brokers + self._topic = topic + + def __eq__(self, other): + if ( + self.brokers != other.brokers + or self.topic != other.topic + or self.source_type != other.source_type + ): + return False + return True + + @property + def brokers(self): + return self._brokers + + @property + def topic(self): + return self._topic + + @property + def source_type(self): + return self._source_type + + def to_proto(self) -> SourceProto: + return SourceProto( + type=SourceTypeProto.KAFKA, + kafka_source_config=KafkaSourceConfigProto( + bootstrap_servers=self.brokers, topic=self.topic + ), + ) diff --git a/sdk/python/feast/specs/EntitySpec_pb2.py b/sdk/python/feast/specs/EntitySpec_pb2.py deleted file mode 100644 index 2d8dec931e3..00000000000 --- a/sdk/python/feast/specs/EntitySpec_pb2.py +++ /dev/null @@ -1,84 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: feast/specs/EntitySpec.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='feast/specs/EntitySpec.proto', - package='feast.specs', - syntax='proto3', - serialized_options=_b('\n\013feast.specsB\017EntitySpecProtoZ6github.com/gojek/feast/protos/generated/go/feast/specs'), - serialized_pb=_b('\n\x1c\x66\x65\x61st/specs/EntitySpec.proto\x12\x0b\x66\x65\x61st.specs\"=\n\nEntitySpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0c\n\x04tags\x18\x03 \x03(\tBV\n\x0b\x66\x65\x61st.specsB\x0f\x45ntitySpecProtoZ6github.com/gojek/feast/protos/generated/go/feast/specsb\x06proto3') -) - - - - -_ENTITYSPEC = _descriptor.Descriptor( - name='EntitySpec', - full_name='feast.specs.EntitySpec', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='feast.specs.EntitySpec.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='description', full_name='feast.specs.EntitySpec.description', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='tags', full_name='feast.specs.EntitySpec.tags', index=2, - number=3, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=45, - serialized_end=106, -) - -DESCRIPTOR.message_types_by_name['EntitySpec'] = _ENTITYSPEC -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -EntitySpec = _reflection.GeneratedProtocolMessageType('EntitySpec', (_message.Message,), dict( - DESCRIPTOR = _ENTITYSPEC, - __module__ = 'feast.specs.EntitySpec_pb2' - # @@protoc_insertion_point(class_scope:feast.specs.EntitySpec) - )) -_sym_db.RegisterMessage(EntitySpec) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/specs/FeatureGroupSpec_pb2.py b/sdk/python/feast/specs/FeatureGroupSpec_pb2.py deleted file mode 100644 index e4f0f1a2756..00000000000 --- a/sdk/python/feast/specs/FeatureGroupSpec_pb2.py +++ /dev/null @@ -1,87 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: feast/specs/FeatureGroupSpec.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from feast.specs import FeatureSpec_pb2 as feast_dot_specs_dot_FeatureSpec__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='feast/specs/FeatureGroupSpec.proto', - package='feast.specs', - syntax='proto3', - serialized_options=_b('\n\013feast.specsB\025FeatureGroupSpecProtoZ6github.com/gojek/feast/protos/generated/go/feast/specs'), - serialized_pb=_b('\n\"feast/specs/FeatureGroupSpec.proto\x12\x0b\x66\x65\x61st.specs\x1a\x1d\x66\x65\x61st/specs/FeatureSpec.proto\"Y\n\x10\x46\x65\x61tureGroupSpec\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04tags\x18\x02 \x03(\t\x12+\n\ndataStores\x18\x03 \x01(\x0b\x32\x17.feast.specs.DataStoresB\\\n\x0b\x66\x65\x61st.specsB\x15\x46\x65\x61tureGroupSpecProtoZ6github.com/gojek/feast/protos/generated/go/feast/specsb\x06proto3') - , - dependencies=[feast_dot_specs_dot_FeatureSpec__pb2.DESCRIPTOR,]) - - - - -_FEATUREGROUPSPEC = _descriptor.Descriptor( - name='FeatureGroupSpec', - full_name='feast.specs.FeatureGroupSpec', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='id', full_name='feast.specs.FeatureGroupSpec.id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='tags', full_name='feast.specs.FeatureGroupSpec.tags', index=1, - number=2, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='dataStores', full_name='feast.specs.FeatureGroupSpec.dataStores', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=82, - serialized_end=171, -) - -_FEATUREGROUPSPEC.fields_by_name['dataStores'].message_type = feast_dot_specs_dot_FeatureSpec__pb2._DATASTORES -DESCRIPTOR.message_types_by_name['FeatureGroupSpec'] = _FEATUREGROUPSPEC -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -FeatureGroupSpec = _reflection.GeneratedProtocolMessageType('FeatureGroupSpec', (_message.Message,), dict( - DESCRIPTOR = _FEATUREGROUPSPEC, - __module__ = 'feast.specs.FeatureGroupSpec_pb2' - # @@protoc_insertion_point(class_scope:feast.specs.FeatureGroupSpec) - )) -_sym_db.RegisterMessage(FeatureGroupSpec) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/specs/FeatureSpec_pb2.py b/sdk/python/feast/specs/FeatureSpec_pb2.py deleted file mode 100644 index ce50ca533a9..00000000000 --- 
a/sdk/python/feast/specs/FeatureSpec_pb2.py +++ /dev/null @@ -1,336 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: feast/specs/FeatureSpec.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from feast.specs import EntitySpec_pb2 as feast_dot_specs_dot_EntitySpec__pb2 -from feast.specs import StorageSpec_pb2 as feast_dot_specs_dot_StorageSpec__pb2 -from feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='feast/specs/FeatureSpec.proto', - package='feast.specs', - syntax='proto3', - serialized_options=_b('\n\013feast.specsB\020FeatureSpecProtoZ6github.com/gojek/feast/protos/generated/go/feast/specs'), - serialized_pb=_b('\n\x1d\x66\x65\x61st/specs/FeatureSpec.proto\x12\x0b\x66\x65\x61st.specs\x1a\x1c\x66\x65\x61st/specs/EntitySpec.proto\x1a\x1d\x66\x65\x61st/specs/StorageSpec.proto\x1a\x17\x66\x65\x61st/types/Value.proto\"\xca\x02\n\x0b\x46\x65\x61tureSpec\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05owner\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x0b\n\x03uri\x18\x05 \x01(\t\x12.\n\tvalueType\x18\x07 \x01(\x0e\x32\x1b.feast.types.ValueType.Enum\x12\x0e\n\x06\x65ntity\x18\x08 \x01(\t\x12\r\n\x05group\x18\t \x01(\t\x12\x0c\n\x04tags\x18\n \x03(\t\x12\x36\n\x07options\x18\x0b \x03(\x0b\x32%.feast.specs.FeatureSpec.OptionsEntry\x12+\n\ndataStores\x18\x0c \x01(\x0b\x32\x17.feast.specs.DataStores\x1a.\n\x0cOptionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"`\n\nDataStores\x12\'\n\x07serving\x18\x01 \x01(\x0b\x32\x16.feast.specs.DataStore\x12)\n\twarehouse\x18\x02 \x01(\x0b\x32\x16.feast.specs.DataStore\"}\n\tDataStore\x12\n\n\x02id\x18\x01 \x01(\t\x12\x34\n\x07options\x18\x02 \x03(\x0b\x32#.feast.specs.DataStore.OptionsEntry\x1a.\n\x0cOptionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42W\n\x0b\x66\x65\x61st.specsB\x10\x46\x65\x61tureSpecProtoZ6github.com/gojek/feast/protos/generated/go/feast/specsb\x06proto3') - , - dependencies=[feast_dot_specs_dot_EntitySpec__pb2.DESCRIPTOR,feast_dot_specs_dot_StorageSpec__pb2.DESCRIPTOR,feast_dot_types_dot_Value__pb2.DESCRIPTOR,]) - - - - -_FEATURESPEC_OPTIONSENTRY = _descriptor.Descriptor( - name='OptionsEntry', - full_name='feast.specs.FeatureSpec.OptionsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='feast.specs.FeatureSpec.OptionsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='feast.specs.FeatureSpec.OptionsEntry.value', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - 
nested_types=[], - enum_types=[ - ], - serialized_options=_b('8\001'), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=417, - serialized_end=463, -) - -_FEATURESPEC = _descriptor.Descriptor( - name='FeatureSpec', - full_name='feast.specs.FeatureSpec', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='id', full_name='feast.specs.FeatureSpec.id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='name', full_name='feast.specs.FeatureSpec.name', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='owner', full_name='feast.specs.FeatureSpec.owner', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='description', full_name='feast.specs.FeatureSpec.description', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='uri', full_name='feast.specs.FeatureSpec.uri', index=4, - number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='valueType', full_name='feast.specs.FeatureSpec.valueType', index=5, - number=7, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='entity', full_name='feast.specs.FeatureSpec.entity', index=6, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='group', full_name='feast.specs.FeatureSpec.group', index=7, - number=9, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='tags', full_name='feast.specs.FeatureSpec.tags', index=8, - number=10, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - 
name='options', full_name='feast.specs.FeatureSpec.options', index=9, - number=11, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='dataStores', full_name='feast.specs.FeatureSpec.dataStores', index=10, - number=12, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_FEATURESPEC_OPTIONSENTRY, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=133, - serialized_end=463, -) - - -_DATASTORES = _descriptor.Descriptor( - name='DataStores', - full_name='feast.specs.DataStores', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='serving', full_name='feast.specs.DataStores.serving', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='warehouse', full_name='feast.specs.DataStores.warehouse', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=465, - serialized_end=561, -) - - -_DATASTORE_OPTIONSENTRY = _descriptor.Descriptor( - name='OptionsEntry', - full_name='feast.specs.DataStore.OptionsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='feast.specs.DataStore.OptionsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='feast.specs.DataStore.OptionsEntry.value', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=_b('8\001'), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=417, - serialized_end=463, -) - -_DATASTORE = _descriptor.Descriptor( - name='DataStore', - full_name='feast.specs.DataStore', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='id', full_name='feast.specs.DataStore.id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, 
containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='options', full_name='feast.specs.DataStore.options', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_DATASTORE_OPTIONSENTRY, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=563, - serialized_end=688, -) - -_FEATURESPEC_OPTIONSENTRY.containing_type = _FEATURESPEC -_FEATURESPEC.fields_by_name['valueType'].enum_type = feast_dot_types_dot_Value__pb2._VALUETYPE_ENUM -_FEATURESPEC.fields_by_name['options'].message_type = _FEATURESPEC_OPTIONSENTRY -_FEATURESPEC.fields_by_name['dataStores'].message_type = _DATASTORES -_DATASTORES.fields_by_name['serving'].message_type = _DATASTORE -_DATASTORES.fields_by_name['warehouse'].message_type = _DATASTORE -_DATASTORE_OPTIONSENTRY.containing_type = _DATASTORE -_DATASTORE.fields_by_name['options'].message_type = _DATASTORE_OPTIONSENTRY -DESCRIPTOR.message_types_by_name['FeatureSpec'] = _FEATURESPEC -DESCRIPTOR.message_types_by_name['DataStores'] = _DATASTORES -DESCRIPTOR.message_types_by_name['DataStore'] = _DATASTORE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -FeatureSpec = _reflection.GeneratedProtocolMessageType('FeatureSpec', (_message.Message,), dict( - - OptionsEntry = _reflection.GeneratedProtocolMessageType('OptionsEntry', (_message.Message,), dict( - DESCRIPTOR = _FEATURESPEC_OPTIONSENTRY, - __module__ = 'feast.specs.FeatureSpec_pb2' - # @@protoc_insertion_point(class_scope:feast.specs.FeatureSpec.OptionsEntry) - )) - , - DESCRIPTOR = _FEATURESPEC, - __module__ = 'feast.specs.FeatureSpec_pb2' - # @@protoc_insertion_point(class_scope:feast.specs.FeatureSpec) - )) -_sym_db.RegisterMessage(FeatureSpec) -_sym_db.RegisterMessage(FeatureSpec.OptionsEntry) - -DataStores = _reflection.GeneratedProtocolMessageType('DataStores', (_message.Message,), dict( - DESCRIPTOR = _DATASTORES, - __module__ = 'feast.specs.FeatureSpec_pb2' - # @@protoc_insertion_point(class_scope:feast.specs.DataStores) - )) -_sym_db.RegisterMessage(DataStores) - -DataStore = _reflection.GeneratedProtocolMessageType('DataStore', (_message.Message,), dict( - - OptionsEntry = _reflection.GeneratedProtocolMessageType('OptionsEntry', (_message.Message,), dict( - DESCRIPTOR = _DATASTORE_OPTIONSENTRY, - __module__ = 'feast.specs.FeatureSpec_pb2' - # @@protoc_insertion_point(class_scope:feast.specs.DataStore.OptionsEntry) - )) - , - DESCRIPTOR = _DATASTORE, - __module__ = 'feast.specs.FeatureSpec_pb2' - # @@protoc_insertion_point(class_scope:feast.specs.DataStore) - )) -_sym_db.RegisterMessage(DataStore) -_sym_db.RegisterMessage(DataStore.OptionsEntry) - - -DESCRIPTOR._options = None -_FEATURESPEC_OPTIONSENTRY._options = None -_DATASTORE_OPTIONSENTRY._options = None -# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/specs/ImportJobSpecs_pb2.py b/sdk/python/feast/specs/ImportJobSpecs_pb2.py deleted file mode 100644 index da84644da8c..00000000000 --- a/sdk/python/feast/specs/ImportJobSpecs_pb2.py +++ /dev/null @@ -1,123 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: feast/specs/ImportJobSpecs.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from feast.specs import ImportSpec_pb2 as feast_dot_specs_dot_ImportSpec__pb2 -from feast.specs import EntitySpec_pb2 as feast_dot_specs_dot_EntitySpec__pb2 -from feast.specs import FeatureSpec_pb2 as feast_dot_specs_dot_FeatureSpec__pb2 -from feast.specs import StorageSpec_pb2 as feast_dot_specs_dot_StorageSpec__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='feast/specs/ImportJobSpecs.proto', - package='feast.specs', - syntax='proto3', - serialized_options=_b('\n\013feast.specsB\023ImportJobSpecsProtoZ6github.com/gojek/feast/protos/generated/go/feast/specs'), - serialized_pb=_b('\n feast/specs/ImportJobSpecs.proto\x12\x0b\x66\x65\x61st.specs\x1a\x1c\x66\x65\x61st/specs/ImportSpec.proto\x1a\x1c\x66\x65\x61st/specs/EntitySpec.proto\x1a\x1d\x66\x65\x61st/specs/FeatureSpec.proto\x1a\x1d\x66\x65\x61st/specs/StorageSpec.proto\"\xcf\x02\n\x0eImportJobSpecs\x12\r\n\x05jobId\x18\x01 \x01(\t\x12+\n\nimportSpec\x18\x02 \x01(\x0b\x32\x17.feast.specs.ImportSpec\x12,\n\x0b\x65ntitySpecs\x18\x03 \x03(\x0b\x32\x17.feast.specs.EntitySpec\x12.\n\x0c\x66\x65\x61tureSpecs\x18\x04 \x03(\x0b\x32\x18.feast.specs.FeatureSpec\x12\x35\n\x13servingStorageSpecs\x18\x05 \x03(\x0b\x32\x18.feast.specs.StorageSpec\x12\x37\n\x15warehouseStorageSpecs\x18\x06 \x03(\x0b\x32\x18.feast.specs.StorageSpec\x12\x33\n\x11\x65rrorsStorageSpec\x18\x07 \x01(\x0b\x32\x18.feast.specs.StorageSpecBZ\n\x0b\x66\x65\x61st.specsB\x13ImportJobSpecsProtoZ6github.com/gojek/feast/protos/generated/go/feast/specsb\x06proto3') - , - dependencies=[feast_dot_specs_dot_ImportSpec__pb2.DESCRIPTOR,feast_dot_specs_dot_EntitySpec__pb2.DESCRIPTOR,feast_dot_specs_dot_FeatureSpec__pb2.DESCRIPTOR,feast_dot_specs_dot_StorageSpec__pb2.DESCRIPTOR,]) - - - - -_IMPORTJOBSPECS = _descriptor.Descriptor( - name='ImportJobSpecs', - full_name='feast.specs.ImportJobSpecs', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='jobId', full_name='feast.specs.ImportJobSpecs.jobId', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='importSpec', full_name='feast.specs.ImportJobSpecs.importSpec', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='entitySpecs', full_name='feast.specs.ImportJobSpecs.entitySpecs', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='featureSpecs', full_name='feast.specs.ImportJobSpecs.featureSpecs', index=3, - number=4, type=11, cpp_type=10, 
label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='servingStorageSpecs', full_name='feast.specs.ImportJobSpecs.servingStorageSpecs', index=4, - number=5, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='warehouseStorageSpecs', full_name='feast.specs.ImportJobSpecs.warehouseStorageSpecs', index=5, - number=6, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='errorsStorageSpec', full_name='feast.specs.ImportJobSpecs.errorsStorageSpec', index=6, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=172, - serialized_end=507, -) - -_IMPORTJOBSPECS.fields_by_name['importSpec'].message_type = feast_dot_specs_dot_ImportSpec__pb2._IMPORTSPEC -_IMPORTJOBSPECS.fields_by_name['entitySpecs'].message_type = feast_dot_specs_dot_EntitySpec__pb2._ENTITYSPEC -_IMPORTJOBSPECS.fields_by_name['featureSpecs'].message_type = feast_dot_specs_dot_FeatureSpec__pb2._FEATURESPEC -_IMPORTJOBSPECS.fields_by_name['servingStorageSpecs'].message_type = feast_dot_specs_dot_StorageSpec__pb2._STORAGESPEC -_IMPORTJOBSPECS.fields_by_name['warehouseStorageSpecs'].message_type = feast_dot_specs_dot_StorageSpec__pb2._STORAGESPEC -_IMPORTJOBSPECS.fields_by_name['errorsStorageSpec'].message_type = feast_dot_specs_dot_StorageSpec__pb2._STORAGESPEC -DESCRIPTOR.message_types_by_name['ImportJobSpecs'] = _IMPORTJOBSPECS -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ImportJobSpecs = _reflection.GeneratedProtocolMessageType('ImportJobSpecs', (_message.Message,), dict( - DESCRIPTOR = _IMPORTJOBSPECS, - __module__ = 'feast.specs.ImportJobSpecs_pb2' - # @@protoc_insertion_point(class_scope:feast.specs.ImportJobSpecs) - )) -_sym_db.RegisterMessage(ImportJobSpecs) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/specs/ImportSpec_pb2.py b/sdk/python/feast/specs/ImportSpec_pb2.py deleted file mode 100644 index 60e5b4f400a..00000000000 --- a/sdk/python/feast/specs/ImportSpec_pb2.py +++ /dev/null @@ -1,314 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: feast/specs/ImportSpec.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='feast/specs/ImportSpec.proto', - package='feast.specs', - syntax='proto3', - serialized_options=_b('\n\013feast.specsB\017ImportSpecProtoZ6github.com/gojek/feast/protos/generated/go/feast/specs'), - serialized_pb=_b('\n\x1c\x66\x65\x61st/specs/ImportSpec.proto\x12\x0b\x66\x65\x61st.specs\x1a\x1fgoogle/protobuf/timestamp.proto\"\xba\x02\n\nImportSpec\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x41\n\rsourceOptions\x18\x02 \x03(\x0b\x32*.feast.specs.ImportSpec.SourceOptionsEntry\x12;\n\njobOptions\x18\x05 \x03(\x0b\x32\'.feast.specs.ImportSpec.JobOptionsEntry\x12\x10\n\x08\x65ntities\x18\x03 \x03(\t\x12#\n\x06schema\x18\x04 \x01(\x0b\x32\x13.feast.specs.Schema\x1a\x34\n\x12SourceOptionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fJobOptionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa2\x01\n\x06Schema\x12\"\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x12.feast.specs.Field\x12\x19\n\x0ftimestampColumn\x18\x05 \x01(\tH\x00\x12\x34\n\x0etimestampValue\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x16\n\x0e\x65ntityIdColumn\x18\x07 \x01(\tB\x0b\n\ttimestamp\"(\n\x05\x46ield\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tfeatureId\x18\x02 \x01(\tBV\n\x0b\x66\x65\x61st.specsB\x0fImportSpecProtoZ6github.com/gojek/feast/protos/generated/go/feast/specsb\x06proto3') - , - dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) - - - - -_IMPORTSPEC_SOURCEOPTIONSENTRY = _descriptor.Descriptor( - name='SourceOptionsEntry', - full_name='feast.specs.ImportSpec.SourceOptionsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='feast.specs.ImportSpec.SourceOptionsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='feast.specs.ImportSpec.SourceOptionsEntry.value', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=_b('8\001'), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=290, - serialized_end=342, -) - -_IMPORTSPEC_JOBOPTIONSENTRY = _descriptor.Descriptor( - name='JobOptionsEntry', - full_name='feast.specs.ImportSpec.JobOptionsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='feast.specs.ImportSpec.JobOptionsEntry.key', index=0, - number=1, type=9, 
cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='feast.specs.ImportSpec.JobOptionsEntry.value', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=_b('8\001'), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=344, - serialized_end=393, -) - -_IMPORTSPEC = _descriptor.Descriptor( - name='ImportSpec', - full_name='feast.specs.ImportSpec', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='type', full_name='feast.specs.ImportSpec.type', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='sourceOptions', full_name='feast.specs.ImportSpec.sourceOptions', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='jobOptions', full_name='feast.specs.ImportSpec.jobOptions', index=2, - number=5, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='entities', full_name='feast.specs.ImportSpec.entities', index=3, - number=3, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='schema', full_name='feast.specs.ImportSpec.schema', index=4, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_IMPORTSPEC_SOURCEOPTIONSENTRY, _IMPORTSPEC_JOBOPTIONSENTRY, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=79, - serialized_end=393, -) - - -_SCHEMA = _descriptor.Descriptor( - name='Schema', - full_name='feast.specs.Schema', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='fields', full_name='feast.specs.Schema.fields', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='timestampColumn', 
full_name='feast.specs.Schema.timestampColumn', index=1, - number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='timestampValue', full_name='feast.specs.Schema.timestampValue', index=2, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='entityIdColumn', full_name='feast.specs.Schema.entityIdColumn', index=3, - number=7, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='timestamp', full_name='feast.specs.Schema.timestamp', - index=0, containing_type=None, fields=[]), - ], - serialized_start=396, - serialized_end=558, -) - - -_FIELD = _descriptor.Descriptor( - name='Field', - full_name='feast.specs.Field', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='feast.specs.Field.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='featureId', full_name='feast.specs.Field.featureId', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=560, - serialized_end=600, -) - -_IMPORTSPEC_SOURCEOPTIONSENTRY.containing_type = _IMPORTSPEC -_IMPORTSPEC_JOBOPTIONSENTRY.containing_type = _IMPORTSPEC -_IMPORTSPEC.fields_by_name['sourceOptions'].message_type = _IMPORTSPEC_SOURCEOPTIONSENTRY -_IMPORTSPEC.fields_by_name['jobOptions'].message_type = _IMPORTSPEC_JOBOPTIONSENTRY -_IMPORTSPEC.fields_by_name['schema'].message_type = _SCHEMA -_SCHEMA.fields_by_name['fields'].message_type = _FIELD -_SCHEMA.fields_by_name['timestampValue'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_SCHEMA.oneofs_by_name['timestamp'].fields.append( - _SCHEMA.fields_by_name['timestampColumn']) -_SCHEMA.fields_by_name['timestampColumn'].containing_oneof = _SCHEMA.oneofs_by_name['timestamp'] -_SCHEMA.oneofs_by_name['timestamp'].fields.append( - _SCHEMA.fields_by_name['timestampValue']) -_SCHEMA.fields_by_name['timestampValue'].containing_oneof = _SCHEMA.oneofs_by_name['timestamp'] -DESCRIPTOR.message_types_by_name['ImportSpec'] = _IMPORTSPEC -DESCRIPTOR.message_types_by_name['Schema'] = _SCHEMA -DESCRIPTOR.message_types_by_name['Field'] = _FIELD 
-_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ImportSpec = _reflection.GeneratedProtocolMessageType('ImportSpec', (_message.Message,), dict( - - SourceOptionsEntry = _reflection.GeneratedProtocolMessageType('SourceOptionsEntry', (_message.Message,), dict( - DESCRIPTOR = _IMPORTSPEC_SOURCEOPTIONSENTRY, - __module__ = 'feast.specs.ImportSpec_pb2' - # @@protoc_insertion_point(class_scope:feast.specs.ImportSpec.SourceOptionsEntry) - )) - , - - JobOptionsEntry = _reflection.GeneratedProtocolMessageType('JobOptionsEntry', (_message.Message,), dict( - DESCRIPTOR = _IMPORTSPEC_JOBOPTIONSENTRY, - __module__ = 'feast.specs.ImportSpec_pb2' - # @@protoc_insertion_point(class_scope:feast.specs.ImportSpec.JobOptionsEntry) - )) - , - DESCRIPTOR = _IMPORTSPEC, - __module__ = 'feast.specs.ImportSpec_pb2' - # @@protoc_insertion_point(class_scope:feast.specs.ImportSpec) - )) -_sym_db.RegisterMessage(ImportSpec) -_sym_db.RegisterMessage(ImportSpec.SourceOptionsEntry) -_sym_db.RegisterMessage(ImportSpec.JobOptionsEntry) - -Schema = _reflection.GeneratedProtocolMessageType('Schema', (_message.Message,), dict( - DESCRIPTOR = _SCHEMA, - __module__ = 'feast.specs.ImportSpec_pb2' - # @@protoc_insertion_point(class_scope:feast.specs.Schema) - )) -_sym_db.RegisterMessage(Schema) - -Field = _reflection.GeneratedProtocolMessageType('Field', (_message.Message,), dict( - DESCRIPTOR = _FIELD, - __module__ = 'feast.specs.ImportSpec_pb2' - # @@protoc_insertion_point(class_scope:feast.specs.Field) - )) -_sym_db.RegisterMessage(Field) - - -DESCRIPTOR._options = None -_IMPORTSPEC_SOURCEOPTIONSENTRY._options = None -_IMPORTSPEC_JOBOPTIONSENTRY._options = None -# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/specs/StorageSpec_pb2.py b/sdk/python/feast/specs/StorageSpec_pb2.py deleted file mode 100644 index 5f4b89bf1af..00000000000 --- a/sdk/python/feast/specs/StorageSpec_pb2.py +++ /dev/null @@ -1,132 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: feast/specs/StorageSpec.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='feast/specs/StorageSpec.proto', - package='feast.specs', - syntax='proto3', - serialized_options=_b('\n\013feast.specsB\020StorageSpecProtoZ6github.com/gojek/feast/protos/generated/go/feast/specs'), - serialized_pb=_b('\n\x1d\x66\x65\x61st/specs/StorageSpec.proto\x12\x0b\x66\x65\x61st.specs\"\x8f\x01\n\x0bStorageSpec\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x36\n\x07options\x18\x03 \x03(\x0b\x32%.feast.specs.StorageSpec.OptionsEntry\x1a.\n\x0cOptionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42W\n\x0b\x66\x65\x61st.specsB\x10StorageSpecProtoZ6github.com/gojek/feast/protos/generated/go/feast/specsb\x06proto3') -) - - - - -_STORAGESPEC_OPTIONSENTRY = _descriptor.Descriptor( - name='OptionsEntry', - full_name='feast.specs.StorageSpec.OptionsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='feast.specs.StorageSpec.OptionsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='feast.specs.StorageSpec.OptionsEntry.value', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=_b('8\001'), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=144, - serialized_end=190, -) - -_STORAGESPEC = _descriptor.Descriptor( - name='StorageSpec', - full_name='feast.specs.StorageSpec', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='id', full_name='feast.specs.StorageSpec.id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='type', full_name='feast.specs.StorageSpec.type', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='options', full_name='feast.specs.StorageSpec.options', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), 
- ], - extensions=[ - ], - nested_types=[_STORAGESPEC_OPTIONSENTRY, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=47, - serialized_end=190, -) - -_STORAGESPEC_OPTIONSENTRY.containing_type = _STORAGESPEC -_STORAGESPEC.fields_by_name['options'].message_type = _STORAGESPEC_OPTIONSENTRY -DESCRIPTOR.message_types_by_name['StorageSpec'] = _STORAGESPEC -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -StorageSpec = _reflection.GeneratedProtocolMessageType('StorageSpec', (_message.Message,), dict( - - OptionsEntry = _reflection.GeneratedProtocolMessageType('OptionsEntry', (_message.Message,), dict( - DESCRIPTOR = _STORAGESPEC_OPTIONSENTRY, - __module__ = 'feast.specs.StorageSpec_pb2' - # @@protoc_insertion_point(class_scope:feast.specs.StorageSpec.OptionsEntry) - )) - , - DESCRIPTOR = _STORAGESPEC, - __module__ = 'feast.specs.StorageSpec_pb2' - # @@protoc_insertion_point(class_scope:feast.specs.StorageSpec) - )) -_sym_db.RegisterMessage(StorageSpec) -_sym_db.RegisterMessage(StorageSpec.OptionsEntry) - - -DESCRIPTOR._options = None -_STORAGESPEC_OPTIONSENTRY._options = None -# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/specs/__init__.py b/sdk/python/feast/specs/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/sdk/python/feast/storage/BigTable_pb2.py b/sdk/python/feast/storage/BigTable_pb2.py deleted file mode 100644 index 4b59ff8b177..00000000000 --- a/sdk/python/feast/storage/BigTable_pb2.py +++ /dev/null @@ -1,84 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: feast/storage/BigTable.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='feast/storage/BigTable.proto', - package='feast.storage', - syntax='proto3', - serialized_options=_b('\n\rfeast.storageB\rBigTableProtoZ8github.com/gojek/feast/protos/generated/go/feast/storage'), - serialized_pb=_b('\n\x1c\x66\x65\x61st/storage/BigTable.proto\x12\rfeast.storage\"O\n\x0e\x42igTableRowKey\x12\x12\n\nsha1Prefix\x18\x01 \x01(\t\x12\x11\n\tentityKey\x18\x02 \x01(\t\x12\x16\n\x0ereversedMillis\x18\x03 \x01(\tBX\n\rfeast.storageB\rBigTableProtoZ8github.com/gojek/feast/protos/generated/go/feast/storageb\x06proto3') -) - - - - -_BIGTABLEROWKEY = _descriptor.Descriptor( - name='BigTableRowKey', - full_name='feast.storage.BigTableRowKey', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='sha1Prefix', full_name='feast.storage.BigTableRowKey.sha1Prefix', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='entityKey', full_name='feast.storage.BigTableRowKey.entityKey', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, 
extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='reversedMillis', full_name='feast.storage.BigTableRowKey.reversedMillis', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=47, - serialized_end=126, -) - -DESCRIPTOR.message_types_by_name['BigTableRowKey'] = _BIGTABLEROWKEY -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -BigTableRowKey = _reflection.GeneratedProtocolMessageType('BigTableRowKey', (_message.Message,), dict( - DESCRIPTOR = _BIGTABLEROWKEY, - __module__ = 'feast.storage.BigTable_pb2' - # @@protoc_insertion_point(class_scope:feast.storage.BigTableRowKey) - )) -_sym_db.RegisterMessage(BigTableRowKey) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/storage/Redis_pb2.py b/sdk/python/feast/storage/Redis_pb2.py index 2220469d497..49b0b793781 100644 --- a/sdk/python/feast/storage/Redis_pb2.py +++ b/sdk/python/feast/storage/Redis_pb2.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: feast/storage/Redis.proto @@ -12,115 +13,38 @@ _sym_db = _symbol_database.Default() -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 +from feast.types import Field_pb2 as feast_dot_types_dot_Field__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name='feast/storage/Redis.proto', package='feast.storage', syntax='proto3', - serialized_options=_b('\n\rfeast.storageB\nRedisProtoZ8github.com/gojek/feast/protos/generated/go/feast/storage'), - serialized_pb=_b('\n\x19\x66\x65\x61st/storage/Redis.proto\x12\rfeast.storage\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17\x66\x65\x61st/types/Value.proto\"R\n\x0eRedisBucketKey\x12\x11\n\tentityKey\x18\x02 \x01(\t\x12\x1b\n\x13\x66\x65\x61tureIdSha1Prefix\x18\x03 \x01(\t\x12\x10\n\x08\x62ucketId\x18\x04 \x01(\x06\"i\n\x10RedisBucketValue\x12!\n\x05value\x18\x01 \x01(\x0b\x32\x12.feast.types.Value\x12\x32\n\x0e\x65ventTimestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"G\n\x14RedisBucketValueList\x12/\n\x06values\x18\x01 \x03(\x0b\x32\x1f.feast.storage.RedisBucketValueBU\n\rfeast.storageB\nRedisProtoZ8github.com/gojek/feast/protos/generated/go/feast/storageb\x06proto3') + serialized_options=_b('\n\rfeast.storageB\nRedisProtoZ2github.com/gojek/feast/sdk/go/protos/feast/storage'), + serialized_pb=_b('\n\x19\x66\x65\x61st/storage/Redis.proto\x12\rfeast.storage\x1a\x17\x66\x65\x61st/types/Field.proto\"E\n\x08RedisKey\x12\x13\n\x0b\x66\x65\x61ture_set\x18\x02 \x01(\t\x12$\n\x08\x65ntities\x18\x03 \x03(\x0b\x32\x12.feast.types.FieldBO\n\rfeast.storageB\nRedisProtoZ2github.com/gojek/feast/sdk/go/protos/feast/storageb\x06proto3') , - dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,feast_dot_types_dot_Value__pb2.DESCRIPTOR,]) + dependencies=[feast_dot_types_dot_Field__pb2.DESCRIPTOR,]) -_REDISBUCKETKEY = _descriptor.Descriptor( - name='RedisBucketKey', - full_name='feast.storage.RedisBucketKey', +_REDISKEY = _descriptor.Descriptor( + name='RedisKey', + full_name='feast.storage.RedisKey', 
filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='entityKey', full_name='feast.storage.RedisBucketKey.entityKey', index=0, + name='feature_set', full_name='feast.storage.RedisKey.feature_set', index=0, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='featureIdSha1Prefix', full_name='feast.storage.RedisBucketKey.featureIdSha1Prefix', index=1, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='bucketId', full_name='feast.storage.RedisBucketKey.bucketId', index=2, - number=4, type=6, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=102, - serialized_end=184, -) - - -_REDISBUCKETVALUE = _descriptor.Descriptor( - name='RedisBucketValue', - full_name='feast.storage.RedisBucketValue', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='value', full_name='feast.storage.RedisBucketValue.value', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='eventTimestamp', full_name='feast.storage.RedisBucketValue.eventTimestamp', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=186, - serialized_end=291, -) - - -_REDISBUCKETVALUELIST = _descriptor.Descriptor( - name='RedisBucketValueList', - full_name='feast.storage.RedisBucketValueList', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='values', full_name='feast.storage.RedisBucketValueList.values', index=0, - number=1, type=11, cpp_type=10, label=3, + name='entities', full_name='feast.storage.RedisKey.entities', index=1, + number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, @@ -137,38 +61,20 @@ extension_ranges=[], oneofs=[ ], - serialized_start=293, - serialized_end=364, + serialized_start=69, + serialized_end=138, ) -_REDISBUCKETVALUE.fields_by_name['value'].message_type = feast_dot_types_dot_Value__pb2._VALUE -_REDISBUCKETVALUE.fields_by_name['eventTimestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP 
-_REDISBUCKETVALUELIST.fields_by_name['values'].message_type = _REDISBUCKETVALUE -DESCRIPTOR.message_types_by_name['RedisBucketKey'] = _REDISBUCKETKEY -DESCRIPTOR.message_types_by_name['RedisBucketValue'] = _REDISBUCKETVALUE -DESCRIPTOR.message_types_by_name['RedisBucketValueList'] = _REDISBUCKETVALUELIST +_REDISKEY.fields_by_name['entities'].message_type = feast_dot_types_dot_Field__pb2._FIELD +DESCRIPTOR.message_types_by_name['RedisKey'] = _REDISKEY _sym_db.RegisterFileDescriptor(DESCRIPTOR) -RedisBucketKey = _reflection.GeneratedProtocolMessageType('RedisBucketKey', (_message.Message,), dict( - DESCRIPTOR = _REDISBUCKETKEY, - __module__ = 'feast.storage.Redis_pb2' - # @@protoc_insertion_point(class_scope:feast.storage.RedisBucketKey) - )) -_sym_db.RegisterMessage(RedisBucketKey) - -RedisBucketValue = _reflection.GeneratedProtocolMessageType('RedisBucketValue', (_message.Message,), dict( - DESCRIPTOR = _REDISBUCKETVALUE, - __module__ = 'feast.storage.Redis_pb2' - # @@protoc_insertion_point(class_scope:feast.storage.RedisBucketValue) - )) -_sym_db.RegisterMessage(RedisBucketValue) - -RedisBucketValueList = _reflection.GeneratedProtocolMessageType('RedisBucketValueList', (_message.Message,), dict( - DESCRIPTOR = _REDISBUCKETVALUELIST, - __module__ = 'feast.storage.Redis_pb2' - # @@protoc_insertion_point(class_scope:feast.storage.RedisBucketValueList) - )) -_sym_db.RegisterMessage(RedisBucketValueList) +RedisKey = _reflection.GeneratedProtocolMessageType('RedisKey', (_message.Message,), { + 'DESCRIPTOR' : _REDISKEY, + '__module__' : 'feast.storage.Redis_pb2' + # @@protoc_insertion_point(class_scope:feast.storage.RedisKey) + }) +_sym_db.RegisterMessage(RedisKey) DESCRIPTOR._options = None diff --git a/sdk/python/feast/storage/Redis_pb2.pyi b/sdk/python/feast/storage/Redis_pb2.pyi new file mode 100644 index 00000000000..717aae79db2 --- /dev/null +++ b/sdk/python/feast/storage/Redis_pb2.pyi @@ -0,0 +1,49 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from feast.types.Field_pb2 import ( + Field as feast___types___Field_pb2___Field, +) + +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + +from google.protobuf.internal.containers import ( + RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from typing import ( + Iterable as typing___Iterable, + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class RedisKey(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + feature_set = ... # type: typing___Text + + @property + def entities(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___types___Field_pb2___Field]: ... + + def __init__(self, + *, + feature_set : typing___Optional[typing___Text] = None, + entities : typing___Optional[typing___Iterable[feast___types___Field_pb2___Field]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> RedisKey: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"entities",u"feature_set"]) -> None: ... 
+ else: + def ClearField(self, field_name: typing_extensions___Literal[u"entities",b"entities",u"feature_set",b"feature_set"]) -> None: ... diff --git a/sdk/python/feast/type_map.py b/sdk/python/feast/type_map.py new file mode 100644 index 00000000000..9fb1d3fda0c --- /dev/null +++ b/sdk/python/feast/type_map.py @@ -0,0 +1,266 @@ +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import numpy as np +import pandas as pd +from datetime import datetime, timezone +from feast.value_type import ValueType +from feast.types.Value_pb2 import ( + Value as ProtoValue, + ValueType as ProtoValueType, + Int64List, + Int32List, + BoolList, + BytesList, + DoubleList, + StringList, + FloatList, +) +from feast.types import FeatureRow_pb2 as FeatureRowProto, Field_pb2 as FieldProto +from google.protobuf.timestamp_pb2 import Timestamp +from feast.constants import DATETIME_COLUMN + +# Mapping of feast value type to Pandas DataFrame dtypes +# Integer and floating values are all 64-bit for better integration +# with BigQuery data types +FEAST_VALUE_TYPE_TO_DTYPE = { + "BYTES": np.byte, + "STRING": np.object, + "INT32": "Int32", # Use pandas nullable int type + "INT64": "Int64", # Use pandas nullable int type + "DOUBLE": np.float64, + "FLOAT": np.float64, + "BOOL": np.bool, +} + +FEAST_VALUE_ATTR_TO_DTYPE = { + "bytes_val": np.byte, + "string_val": np.object, + "int32_val": "Int32", + "int64_val": "Int64", + "double_val": np.float64, + "float_val": np.float64, + "bool_val": np.bool, +} + + +def dtype_to_feast_value_attr(dtype): + # Mapping of Pandas dtype to attribute name in Feast Value + type_map = { + "float64": "double_val", + "float32": "float_val", + "int64": "int64_val", + "uint64": "int64_val", + "int32": "int32_val", + "uint32": "int32_val", + "uint8": "int32_val", + "int8": "int32_val", + "bool": "bool_val", + "timedelta": "int64_val", + "datetime64[ns]": "int64_val", + "datetime64[ns, UTC]": "int64_val", + "category": "string_val", + "object": "string_val", + } + return type_map[dtype.__str__()] + + +def dtype_to_value_type(dtype): + """Returns the equivalent feast valueType for the given dtype + Args: + dtype (pandas.dtype): pandas dtype + Returns: + feast.types.ValueType2.ValueType: equivalent feast valuetype + """ + # mapping of pandas dtypes to feast value type strings + type_map = { + "float64": ProtoValueType.DOUBLE, + "float32": ProtoValueType.FLOAT, + "int64": ProtoValueType.INT64, + "uint64": ProtoValueType.INT64, + "int32": ProtoValueType.INT32, + "uint32": ProtoValueType.INT32, + "uint8": ProtoValueType.INT32, + "int8": ProtoValueType.INT32, + "bool": ProtoValueType.BOOL, + "timedelta": ProtoValueType.INT64, + "datetime64[ns]": ProtoValueType.INT64, + "datetime64[ns, UTC]": ProtoValueType.INT64, + "category": ProtoValueType.STRING, + "object": ProtoValueType.STRING, + } + return type_map[dtype.__str__()] + + +# TODO: to pass test_importer +def pandas_dtype_to_feast_value_type(dtype: pd.DataFrame.dtypes) -> ValueType: + type_map = { + "float64": 
ValueType.DOUBLE, + "float32": ValueType.FLOAT, + "int64": ValueType.INT64, + "uint64": ValueType.INT64, + "int32": ValueType.INT32, + "uint32": ValueType.INT32, + "uint8": ValueType.INT32, + "int8": ValueType.INT32, + "bool": ValueType.BOOL, + "timedelta": ValueType.INT64, + "datetime64[ns]": ValueType.INT64, + "datetime64[ns, tz]": ValueType.INT64, + "category": ValueType.STRING, + "object": ValueType.STRING, + } + return type_map[dtype.__str__()] + + +def convert_df_to_feature_rows(dataframe: pd.DataFrame, feature_set): + def convert_series_to_proto_values(row: pd.Series): + feature_row = FeatureRowProto.FeatureRow( + event_timestamp=pd_datetime_to_timestamp_proto( + dataframe[DATETIME_COLUMN].dtype, row[DATETIME_COLUMN] + ), + feature_set=feature_set.name + ":" + str(feature_set.version), + ) + + for field_name, field in feature_set.fields.items(): + feature_row.fields.extend( + [ + FieldProto.Field( + name=field.name, + value=pd_value_to_proto_value(field.dtype, row[field.name]), + ) + ] + ) + return feature_row + + return convert_series_to_proto_values + + +def pd_datetime_to_timestamp_proto(dtype, value) -> Timestamp: + if type(value) in [np.float64, np.float32, np.int32, np.int64]: + return Timestamp(seconds=int(value)) + if dtype.__str__() == "datetime64[ns]": + # If timestamp does not contain timezone, we assume it is of local + # timezone and adjust it to UTC + local_timezone = datetime.now(timezone.utc).astimezone().tzinfo + value = value.tz_localize(local_timezone).tz_convert("UTC").tz_localize(None) + return Timestamp(seconds=int(value.timestamp())) + if dtype.__str__() == "datetime64[ns, UTC]": + return Timestamp(seconds=int(value.timestamp())) + else: + return Timestamp(seconds=np.datetime64(value).astype("int64") // 1000000) + + +def type_err(item, dtype): + raise ValueError(f'Value "{item}" is of type {type(item)} not of type {dtype}') + + +def pd_value_to_proto_value(feast_value_type, value) -> ProtoValue: + + # Detect list type and handle separately + if "list" in feast_value_type.name.lower(): + + if feast_value_type == ValueType.FLOAT_LIST: + return ProtoValue( + float_list_val=FloatList( + val=[ + item if type(item) is np.float32 else type_err(item, np.float32) + for item in value + ] + ) + ) + + if feast_value_type == ValueType.DOUBLE_LIST: + return ProtoValue( + double_list_val=DoubleList( + val=[ + item if type(item) is np.float64 else type_err(item, np.float64) + for item in value + ] + ) + ) + + if feast_value_type == ValueType.INT32_LIST: + return ProtoValue( + int32_list_val=Int32List( + val=[ + item if type(item) is np.int32 else type_err(item, np.int32) + for item in value + ] + ) + ) + + if feast_value_type == ValueType.INT64_LIST: + return ProtoValue( + int64_list_val=Int64List( + val=[ + item if type(item) is np.int64 else type_err(item, np.int64) + for item in value + ] + ) + ) + + if feast_value_type == ValueType.STRING_LIST: + return ProtoValue( + string_list_val=StringList( + val=[ + item if type(item) is np.str_ else type_err(item, np.str_) + for item in value + ] + ) + ) + + if feast_value_type == ValueType.BOOL_LIST: + return ProtoValue( + bool_list_val=BoolList( + val=[ + item if type(item) is np.bool_ else type_err(item, np.bool_) + for item in value + ] + ) + ) + + if feast_value_type == ValueType.BYTES_LIST: + return ProtoValue( + bytes_list_val=BytesList( + val=[ + item if type(item) is np.bytes_ else type_err(item, np.bytes_) + for item in value + ] + ) + ) + + # Handle scalar types below + else: + if pd.isnull(value): + return ProtoValue() + 
elif feast_value_type == ValueType.INT32: + return ProtoValue(int32_val=int(value)) + elif feast_value_type == ValueType.INT64: + return ProtoValue(int64_val=int(value)) + elif feast_value_type == ValueType.FLOAT: + return ProtoValue(float_val=float(value)) + elif feast_value_type == ValueType.DOUBLE: + assert type(value) is float + return ProtoValue(double_val=value) + elif feast_value_type == ValueType.STRING: + return ProtoValue(string_val=str(value)) + elif feast_value_type == ValueType.BYTES: + assert type(value) is bytes + return ProtoValue(bytes_val=value) + elif feast_value_type == ValueType.BOOL: + assert type(value) is bool + return ProtoValue(bool_val=value) + + raise Exception(f"Unsupported data type: ${str(type(value))}") diff --git a/sdk/python/feast/types/FeatureRowExtended_pb2.py b/sdk/python/feast/types/FeatureRowExtended_pb2.py index 21597cc3045..e7372958168 100644 --- a/sdk/python/feast/types/FeatureRowExtended_pb2.py +++ b/sdk/python/feast/types/FeatureRowExtended_pb2.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: feast/types/FeatureRowExtended.proto @@ -20,8 +21,8 @@ name='feast/types/FeatureRowExtended.proto', package='feast.types', syntax='proto3', - serialized_options=_b('\n\013feast.typesB\027FeatureRowExtendedProtoZ6github.com/gojek/feast/protos/generated/go/feast/types'), - serialized_pb=_b('\n$feast/types/FeatureRowExtended.proto\x12\x0b\x66\x65\x61st.types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1c\x66\x65\x61st/types/FeatureRow.proto\"N\n\x05\x45rror\x12\r\n\x05\x63\x61use\x18\x01 \x01(\t\x12\x11\n\ttransform\x18\x02 \x01(\t\x12\x0f\n\x07message\x18\x03 \x01(\t\x12\x12\n\nstackTrace\x18\x04 \x01(\t\">\n\x07\x41ttempt\x12\x10\n\x08\x61ttempts\x18\x01 \x01(\x05\x12!\n\x05\x65rror\x18\x02 \x01(\x0b\x32\x12.feast.types.Error\"\x94\x01\n\x12\x46\x65\x61tureRowExtended\x12$\n\x03row\x18\x01 \x01(\x0b\x32\x17.feast.types.FeatureRow\x12)\n\x0blastAttempt\x18\x02 \x01(\x0b\x32\x14.feast.types.Attempt\x12-\n\tfirstSeen\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB^\n\x0b\x66\x65\x61st.typesB\x17\x46\x65\x61tureRowExtendedProtoZ6github.com/gojek/feast/protos/generated/go/feast/typesb\x06proto3') + serialized_options=_b('\n\013feast.typesB\027FeatureRowExtendedProtoZ0github.com/gojek/feast/sdk/go/protos/feast/types'), + serialized_pb=_b('\n$feast/types/FeatureRowExtended.proto\x12\x0b\x66\x65\x61st.types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1c\x66\x65\x61st/types/FeatureRow.proto\"O\n\x05\x45rror\x12\r\n\x05\x63\x61use\x18\x01 \x01(\t\x12\x11\n\ttransform\x18\x02 \x01(\t\x12\x0f\n\x07message\x18\x03 \x01(\t\x12\x13\n\x0bstack_trace\x18\x04 \x01(\t\">\n\x07\x41ttempt\x12\x10\n\x08\x61ttempts\x18\x01 \x01(\x05\x12!\n\x05\x65rror\x18\x02 \x01(\x0b\x32\x12.feast.types.Error\"\x96\x01\n\x12\x46\x65\x61tureRowExtended\x12$\n\x03row\x18\x01 \x01(\x0b\x32\x17.feast.types.FeatureRow\x12*\n\x0clast_attempt\x18\x02 \x01(\x0b\x32\x14.feast.types.Attempt\x12.\n\nfirst_seen\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampBX\n\x0b\x66\x65\x61st.typesB\x17\x46\x65\x61tureRowExtendedProtoZ0github.com/gojek/feast/sdk/go/protos/feast/typesb\x06proto3') , dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,feast_dot_types_dot_FeatureRow__pb2.DESCRIPTOR,]) @@ -57,7 +58,7 @@ is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='stackTrace', full_name='feast.types.Error.stackTrace', index=3, + name='stack_trace', 
full_name='feast.types.Error.stack_trace', index=3, number=4, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, @@ -76,7 +77,7 @@ oneofs=[ ], serialized_start=116, - serialized_end=194, + serialized_end=195, ) @@ -113,8 +114,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=196, - serialized_end=258, + serialized_start=197, + serialized_end=259, ) @@ -133,14 +134,14 @@ is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='lastAttempt', full_name='feast.types.FeatureRowExtended.lastAttempt', index=1, + name='last_attempt', full_name='feast.types.FeatureRowExtended.last_attempt', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='firstSeen', full_name='feast.types.FeatureRowExtended.firstSeen', index=2, + name='first_seen', full_name='feast.types.FeatureRowExtended.first_seen', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, @@ -158,38 +159,38 @@ extension_ranges=[], oneofs=[ ], - serialized_start=261, - serialized_end=409, + serialized_start=262, + serialized_end=412, ) _ATTEMPT.fields_by_name['error'].message_type = _ERROR _FEATUREROWEXTENDED.fields_by_name['row'].message_type = feast_dot_types_dot_FeatureRow__pb2._FEATUREROW -_FEATUREROWEXTENDED.fields_by_name['lastAttempt'].message_type = _ATTEMPT -_FEATUREROWEXTENDED.fields_by_name['firstSeen'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_FEATUREROWEXTENDED.fields_by_name['last_attempt'].message_type = _ATTEMPT +_FEATUREROWEXTENDED.fields_by_name['first_seen'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP DESCRIPTOR.message_types_by_name['Error'] = _ERROR DESCRIPTOR.message_types_by_name['Attempt'] = _ATTEMPT DESCRIPTOR.message_types_by_name['FeatureRowExtended'] = _FEATUREROWEXTENDED _sym_db.RegisterFileDescriptor(DESCRIPTOR) -Error = _reflection.GeneratedProtocolMessageType('Error', (_message.Message,), dict( - DESCRIPTOR = _ERROR, - __module__ = 'feast.types.FeatureRowExtended_pb2' +Error = _reflection.GeneratedProtocolMessageType('Error', (_message.Message,), { + 'DESCRIPTOR' : _ERROR, + '__module__' : 'feast.types.FeatureRowExtended_pb2' # @@protoc_insertion_point(class_scope:feast.types.Error) - )) + }) _sym_db.RegisterMessage(Error) -Attempt = _reflection.GeneratedProtocolMessageType('Attempt', (_message.Message,), dict( - DESCRIPTOR = _ATTEMPT, - __module__ = 'feast.types.FeatureRowExtended_pb2' +Attempt = _reflection.GeneratedProtocolMessageType('Attempt', (_message.Message,), { + 'DESCRIPTOR' : _ATTEMPT, + '__module__' : 'feast.types.FeatureRowExtended_pb2' # @@protoc_insertion_point(class_scope:feast.types.Attempt) - )) + }) _sym_db.RegisterMessage(Attempt) -FeatureRowExtended = _reflection.GeneratedProtocolMessageType('FeatureRowExtended', (_message.Message,), dict( - DESCRIPTOR = _FEATUREROWEXTENDED, - __module__ = 'feast.types.FeatureRowExtended_pb2' +FeatureRowExtended = _reflection.GeneratedProtocolMessageType('FeatureRowExtended', (_message.Message,), { + 'DESCRIPTOR' : _FEATUREROWEXTENDED, + '__module__' : 'feast.types.FeatureRowExtended_pb2' # 
@@protoc_insertion_point(class_scope:feast.types.FeatureRowExtended) - )) + }) _sym_db.RegisterMessage(FeatureRowExtended) diff --git a/sdk/python/feast/types/FeatureRowExtended_pb2.pyi b/sdk/python/feast/types/FeatureRowExtended_pb2.pyi new file mode 100644 index 00000000000..4f3d02c8ee6 --- /dev/null +++ b/sdk/python/feast/types/FeatureRowExtended_pb2.pyi @@ -0,0 +1,102 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from feast.types.FeatureRow_pb2 import ( + FeatureRow as feast___types___FeatureRow_pb2___FeatureRow, +) + +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from google.protobuf.timestamp_pb2 import ( + Timestamp as google___protobuf___timestamp_pb2___Timestamp, +) + +from typing import ( + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class Error(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + cause = ... # type: typing___Text + transform = ... # type: typing___Text + message = ... # type: typing___Text + stack_trace = ... # type: typing___Text + + def __init__(self, + *, + cause : typing___Optional[typing___Text] = None, + transform : typing___Optional[typing___Text] = None, + message : typing___Optional[typing___Text] = None, + stack_trace : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Error: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"cause",u"message",u"stack_trace",u"transform"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"cause",b"cause",u"message",b"message",u"stack_trace",b"stack_trace",u"transform",b"transform"]) -> None: ... + +class Attempt(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + attempts = ... # type: int + + @property + def error(self) -> Error: ... + + def __init__(self, + *, + attempts : typing___Optional[int] = None, + error : typing___Optional[Error] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Attempt: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"error"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"attempts",u"error"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"error",b"error"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"attempts",b"attempts",u"error",b"error"]) -> None: ... + +class FeatureRowExtended(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + @property + def row(self) -> feast___types___FeatureRow_pb2___FeatureRow: ... + + @property + def last_attempt(self) -> Attempt: ... + + @property + def first_seen(self) -> google___protobuf___timestamp_pb2___Timestamp: ... 
+ + def __init__(self, + *, + row : typing___Optional[feast___types___FeatureRow_pb2___FeatureRow] = None, + last_attempt : typing___Optional[Attempt] = None, + first_seen : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> FeatureRowExtended: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"first_seen",u"last_attempt",u"row"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"first_seen",u"last_attempt",u"row"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"first_seen",b"first_seen",u"last_attempt",b"last_attempt",u"row",b"row"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"first_seen",b"first_seen",u"last_attempt",b"last_attempt",u"row",b"row"]) -> None: ... diff --git a/sdk/python/feast/types/FeatureRow_pb2.py b/sdk/python/feast/types/FeatureRow_pb2.py index 4096955df82..1b6c16910f2 100644 --- a/sdk/python/feast/types/FeatureRow_pb2.py +++ b/sdk/python/feast/types/FeatureRow_pb2.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: feast/types/FeatureRow.proto @@ -13,66 +14,21 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from feast.types import Feature_pb2 as feast_dot_types_dot_Feature__pb2 +from feast.types import Field_pb2 as feast_dot_types_dot_Field__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name='feast/types/FeatureRow.proto', package='feast.types', syntax='proto3', - serialized_options=_b('\n\013feast.typesB\017FeatureRowProtoZ6github.com/gojek/feast/protos/generated/go/feast/types'), - serialized_pb=_b('\n\x1c\x66\x65\x61st/types/FeatureRow.proto\x12\x0b\x66\x65\x61st.types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x19\x66\x65\x61st/types/Feature.proto\"j\n\rFeatureRowKey\x12\x11\n\tentityKey\x18\x01 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x12\n\nentityName\x18\x04 \x01(\t\"\x8f\x01\n\nFeatureRow\x12\x11\n\tentityKey\x18\x01 \x01(\t\x12&\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0b\x32\x14.feast.types.Feature\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x12\n\nentityName\x18\x04 \x01(\tBV\n\x0b\x66\x65\x61st.typesB\x0f\x46\x65\x61tureRowProtoZ6github.com/gojek/feast/protos/generated/go/feast/typesb\x06proto3') + serialized_options=_b('\n\013feast.typesB\017FeatureRowProtoZ0github.com/gojek/feast/sdk/go/protos/feast/types'), + serialized_pb=_b('\n\x1c\x66\x65\x61st/types/FeatureRow.proto\x12\x0b\x66\x65\x61st.types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17\x66\x65\x61st/types/Field.proto\"z\n\nFeatureRow\x12\"\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x12.feast.types.Field\x12\x33\n\x0f\x65vent_timestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x13\n\x0b\x66\x65\x61ture_set\x18\x06 \x01(\tBP\n\x0b\x66\x65\x61st.typesB\x0f\x46\x65\x61tureRowProtoZ0github.com/gojek/feast/sdk/go/protos/feast/typesb\x06proto3') , - dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,feast_dot_types_dot_Feature__pb2.DESCRIPTOR,]) + dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,feast_dot_types_dot_Field__pb2.DESCRIPTOR,]) -_FEATUREROWKEY = _descriptor.Descriptor( - 
name='FeatureRowKey', - full_name='feast.types.FeatureRowKey', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='entityKey', full_name='feast.types.FeatureRowKey.entityKey', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='eventTimestamp', full_name='feast.types.FeatureRowKey.eventTimestamp', index=1, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='entityName', full_name='feast.types.FeatureRowKey.entityName', index=2, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=105, - serialized_end=211, -) - - _FEATUREROW = _descriptor.Descriptor( name='FeatureRow', full_name='feast.types.FeatureRow', @@ -81,29 +37,22 @@ containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='entityKey', full_name='feast.types.FeatureRow.entityKey', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='features', full_name='feast.types.FeatureRow.features', index=1, + name='fields', full_name='feast.types.FeatureRow.fields', index=0, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='eventTimestamp', full_name='feast.types.FeatureRow.eventTimestamp', index=2, + name='event_timestamp', full_name='feast.types.FeatureRow.event_timestamp', index=1, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='entityName', full_name='feast.types.FeatureRow.entityName', index=3, - number=4, type=9, cpp_type=9, label=1, + name='feature_set', full_name='feast.types.FeatureRow.feature_set', index=2, + number=6, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, @@ -120,29 +69,20 @@ extension_ranges=[], oneofs=[ ], - serialized_start=214, - serialized_end=357, + serialized_start=103, + serialized_end=225, ) -_FEATUREROWKEY.fields_by_name['eventTimestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_FEATUREROW.fields_by_name['features'].message_type = 
feast_dot_types_dot_Feature__pb2._FEATURE -_FEATUREROW.fields_by_name['eventTimestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -DESCRIPTOR.message_types_by_name['FeatureRowKey'] = _FEATUREROWKEY +_FEATUREROW.fields_by_name['fields'].message_type = feast_dot_types_dot_Field__pb2._FIELD +_FEATUREROW.fields_by_name['event_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP DESCRIPTOR.message_types_by_name['FeatureRow'] = _FEATUREROW _sym_db.RegisterFileDescriptor(DESCRIPTOR) -FeatureRowKey = _reflection.GeneratedProtocolMessageType('FeatureRowKey', (_message.Message,), dict( - DESCRIPTOR = _FEATUREROWKEY, - __module__ = 'feast.types.FeatureRow_pb2' - # @@protoc_insertion_point(class_scope:feast.types.FeatureRowKey) - )) -_sym_db.RegisterMessage(FeatureRowKey) - -FeatureRow = _reflection.GeneratedProtocolMessageType('FeatureRow', (_message.Message,), dict( - DESCRIPTOR = _FEATUREROW, - __module__ = 'feast.types.FeatureRow_pb2' +FeatureRow = _reflection.GeneratedProtocolMessageType('FeatureRow', (_message.Message,), { + 'DESCRIPTOR' : _FEATUREROW, + '__module__' : 'feast.types.FeatureRow_pb2' # @@protoc_insertion_point(class_scope:feast.types.FeatureRow) - )) + }) _sym_db.RegisterMessage(FeatureRow) diff --git a/sdk/python/feast/types/FeatureRow_pb2.pyi b/sdk/python/feast/types/FeatureRow_pb2.pyi new file mode 100644 index 00000000000..9bf745f9130 --- /dev/null +++ b/sdk/python/feast/types/FeatureRow_pb2.pyi @@ -0,0 +1,59 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from feast.types.Field_pb2 import ( + Field as feast___types___Field_pb2___Field, +) + +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + +from google.protobuf.internal.containers import ( + RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from google.protobuf.timestamp_pb2 import ( + Timestamp as google___protobuf___timestamp_pb2___Timestamp, +) + +from typing import ( + Iterable as typing___Iterable, + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class FeatureRow(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + feature_set = ... # type: typing___Text + + @property + def fields(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___types___Field_pb2___Field]: ... + + @property + def event_timestamp(self) -> google___protobuf___timestamp_pb2___Timestamp: ... + + def __init__(self, + *, + fields : typing___Optional[typing___Iterable[feast___types___Field_pb2___Field]] = None, + event_timestamp : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, + feature_set : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> FeatureRow: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"event_timestamp"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"event_timestamp",u"feature_set",u"fields"]) -> None: ... 
+ else: + def HasField(self, field_name: typing_extensions___Literal[u"event_timestamp",b"event_timestamp"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"event_timestamp",b"event_timestamp",u"feature_set",b"feature_set",u"fields",b"fields"]) -> None: ... diff --git a/sdk/python/feast/types/Feature_pb2.py b/sdk/python/feast/types/Feature_pb2.py index b880b4747e5..98d98d88a66 100644 --- a/sdk/python/feast/types/Feature_pb2.py +++ b/sdk/python/feast/types/Feature_pb2.py @@ -1,12 +1,15 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: feast/types/Feature.proto import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -16,63 +19,88 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name='feast/types/Feature.proto', - package='feast.types', - syntax='proto3', - serialized_options=_b('\n\013feast.typesB\014FeatureProtoZ6github.com/gojek/feast/protos/generated/go/feast/types'), - serialized_pb=_b('\n\x19\x66\x65\x61st/types/Feature.proto\x12\x0b\x66\x65\x61st.types\x1a\x17\x66\x65\x61st/types/Value.proto\"8\n\x07\x46\x65\x61ture\x12\n\n\x02id\x18\x01 \x01(\t\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.feast.types.ValueBS\n\x0b\x66\x65\x61st.typesB\x0c\x46\x65\x61tureProtoZ6github.com/gojek/feast/protos/generated/go/feast/typesb\x06proto3') - , - dependencies=[feast_dot_types_dot_Value__pb2.DESCRIPTOR,]) - - + name="feast/types/Feature.proto", + package="feast.types", + syntax="proto3", + serialized_options=_b( + "\n\013feast.typesB\014FeatureProtoZ6github.com/gojek/feast/protos/generated/go/feast/types" + ), + serialized_pb=_b( + '\n\x19\x66\x65\x61st/types/Feature.proto\x12\x0b\x66\x65\x61st.types\x1a\x17\x66\x65\x61st/types/Value.proto":\n\x07\x46\x65\x61ture\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.feast.types.Value\x12\x0c\n\x04name\x18\x03 \x01(\tBS\n\x0b\x66\x65\x61st.typesB\x0c\x46\x65\x61tureProtoZ6github.com/gojek/feast/protos/generated/go/feast/typesb\x06proto3' + ), + dependencies=[feast_dot_types_dot_Value__pb2.DESCRIPTOR], +) _FEATURE = _descriptor.Descriptor( - name='Feature', - full_name='feast.types.Feature', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='id', full_name='feast.types.Feature.id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='feast.types.Feature.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=67, - serialized_end=123, + name="Feature", + full_name="feast.types.Feature", 
+ filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="value", + full_name="feast.types.Feature.value", + index=0, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="name", + full_name="feast.types.Feature.name", + index=1, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=67, + serialized_end=125, ) -_FEATURE.fields_by_name['value'].message_type = feast_dot_types_dot_Value__pb2._VALUE -DESCRIPTOR.message_types_by_name['Feature'] = _FEATURE +_FEATURE.fields_by_name["value"].message_type = feast_dot_types_dot_Value__pb2._VALUE +DESCRIPTOR.message_types_by_name["Feature"] = _FEATURE _sym_db.RegisterFileDescriptor(DESCRIPTOR) -Feature = _reflection.GeneratedProtocolMessageType('Feature', (_message.Message,), dict( - DESCRIPTOR = _FEATURE, - __module__ = 'feast.types.Feature_pb2' - # @@protoc_insertion_point(class_scope:feast.types.Feature) - )) +Feature = _reflection.GeneratedProtocolMessageType( + "Feature", + (_message.Message,), + { + "DESCRIPTOR": _FEATURE, + "__module__": "feast.types.Feature_pb2" + # @@protoc_insertion_point(class_scope:feast.types.Feature) + }, +) _sym_db.RegisterMessage(Feature) diff --git a/sdk/python/feast/types/Feature_pb2.pyi b/sdk/python/feast/types/Feature_pb2.pyi new file mode 100644 index 00000000000..f31122f1da8 --- /dev/null +++ b/sdk/python/feast/types/Feature_pb2.pyi @@ -0,0 +1,44 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from feast.types.Value_pb2 import Value as feast___types___Value_pb2___Value + +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + +from google.protobuf.message import Message as google___protobuf___message___Message + +from typing import Optional as typing___Optional, Text as typing___Text + +from typing_extensions import Literal as typing_extensions___Literal + +class Feature(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + name = ... # type: typing___Text + @property + def value(self) -> feast___types___Value_pb2___Value: ... + def __init__( + self, + *, + value: typing___Optional[feast___types___Value_pb2___Value] = None, + name: typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Feature: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField( + self, field_name: typing_extensions___Literal["value"] + ) -> bool: ... + def ClearField( + self, field_name: typing_extensions___Literal["name", "value"] + ) -> None: ... + else: + def HasField( + self, field_name: typing_extensions___Literal["value", b"value"] + ) -> bool: ... 
+ def ClearField( + self, + field_name: typing_extensions___Literal["name", b"name", "value", b"value"], + ) -> None: ... diff --git a/sdk/python/feast/types/Field_pb2.py b/sdk/python/feast/types/Field_pb2.py new file mode 100644 index 00000000000..95bcf38cf9d --- /dev/null +++ b/sdk/python/feast/types/Field_pb2.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/types/Field.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='feast/types/Field.proto', + package='feast.types', + syntax='proto3', + serialized_options=_b('\n\013feast.typesB\nFieldProtoZ0github.com/gojek/feast/sdk/go/protos/feast/types'), + serialized_pb=_b('\n\x17\x66\x65\x61st/types/Field.proto\x12\x0b\x66\x65\x61st.types\x1a\x17\x66\x65\x61st/types/Value.proto\"8\n\x05\x46ield\x12\x0c\n\x04name\x18\x01 \x01(\t\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.feast.types.ValueBK\n\x0b\x66\x65\x61st.typesB\nFieldProtoZ0github.com/gojek/feast/sdk/go/protos/feast/typesb\x06proto3') + , + dependencies=[feast_dot_types_dot_Value__pb2.DESCRIPTOR,]) + + + + +_FIELD = _descriptor.Descriptor( + name='Field', + full_name='feast.types.Field', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='feast.types.Field.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='feast.types.Field.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=65, + serialized_end=121, +) + +_FIELD.fields_by_name['value'].message_type = feast_dot_types_dot_Value__pb2._VALUE +DESCRIPTOR.message_types_by_name['Field'] = _FIELD +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Field = _reflection.GeneratedProtocolMessageType('Field', (_message.Message,), { + 'DESCRIPTOR' : _FIELD, + '__module__' : 'feast.types.Field_pb2' + # @@protoc_insertion_point(class_scope:feast.types.Field) + }) +_sym_db.RegisterMessage(Field) + + +DESCRIPTOR._options = None +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/types/Field_pb2.pyi b/sdk/python/feast/types/Field_pb2.pyi new file mode 100644 index 00000000000..1305503fab7 --- /dev/null +++ b/sdk/python/feast/types/Field_pb2.pyi @@ -0,0 +1,46 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! 
+import sys +from feast.types.Value_pb2 import ( + Value as feast___types___Value_pb2___Value, +) + +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from typing import ( + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class Field(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + name = ... # type: typing___Text + + @property + def value(self) -> feast___types___Value_pb2___Value: ... + + def __init__(self, + *, + name : typing___Optional[typing___Text] = None, + value : typing___Optional[feast___types___Value_pb2___Value] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Field: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"value"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"name",u"value"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"value",b"value"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name",u"value",b"value"]) -> None: ... diff --git a/sdk/python/feast/types/Value_pb2.py b/sdk/python/feast/types/Value_pb2.py index 2e9d3177fff..fe2cd125ca5 100644 --- a/sdk/python/feast/types/Value_pb2.py +++ b/sdk/python/feast/types/Value_pb2.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: feast/types/Value.proto @@ -12,17 +13,15 @@ _sym_db = _symbol_database.Default() -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name='feast/types/Value.proto', package='feast.types', syntax='proto3', - serialized_options=_b('\n\013feast.typesB\nValueProtoZ6github.com/gojek/feast/protos/generated/go/feast/types'), - serialized_pb=_b('\n\x17\x66\x65\x61st/types/Value.proto\x12\x0b\x66\x65\x61st.types\x1a\x1fgoogle/protobuf/timestamp.proto\"}\n\tValueType\"p\n\x04\x45num\x12\x0b\n\x07UNKNOWN\x10\x00\x12\t\n\x05\x42YTES\x10\x01\x12\n\n\x06STRING\x10\x02\x12\t\n\x05INT32\x10\x03\x12\t\n\x05INT64\x10\x04\x12\n\n\x06\x44OUBLE\x10\x05\x12\t\n\x05\x46LOAT\x10\x06\x12\x08\n\x04\x42OOL\x10\x07\x12\r\n\tTIMESTAMP\x10\x08\"\xcf\x01\n\x05Value\x12\x12\n\x08\x62ytesVal\x18\x01 \x01(\x0cH\x00\x12\x13\n\tstringVal\x18\x02 \x01(\tH\x00\x12\x12\n\x08int32Val\x18\x03 \x01(\x05H\x00\x12\x12\n\x08int64Val\x18\x04 \x01(\x03H\x00\x12\x13\n\tdoubleVal\x18\x05 \x01(\x01H\x00\x12\x12\n\x08\x66loatVal\x18\x06 \x01(\x02H\x00\x12\x11\n\x07\x62oolVal\x18\x07 \x01(\x08H\x00\x12\x32\n\x0ctimestampVal\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x05\n\x03val\"\x8a\x03\n\tValueList\x12+\n\tbytesList\x18\x01 \x01(\x0b\x32\x16.feast.types.BytesListH\x00\x12-\n\nstringList\x18\x02 \x01(\x0b\x32\x17.feast.types.StringListH\x00\x12+\n\tint32List\x18\x03 \x01(\x0b\x32\x16.feast.types.Int32ListH\x00\x12+\n\tint64List\x18\x04 \x01(\x0b\x32\x16.feast.types.Int64ListH\x00\x12-\n\ndoubleList\x18\x05 \x01(\x0b\x32\x17.feast.types.DoubleListH\x00\x12+\n\tfloatList\x18\x06 \x01(\x0b\x32\x16.feast.types.FloatListH\x00\x12)\n\x08\x62oolList\x18\x07 \x01(\x0b\x32\x15.feast.types.BoolListH\x00\x12\x33\n\rtimestampList\x18\x08 \x01(\x0b\x32\x1a.feast.types.TimestampListH\x00\x42\x0b\n\tvalueList\"\x18\n\tBytesList\x12\x0b\n\x03val\x18\x01 \x03(\x0c\"\x19\n\nStringList\x12\x0b\n\x03val\x18\x01 \x03(\t\"\x18\n\tInt32List\x12\x0b\n\x03val\x18\x01 \x03(\x05\"\x18\n\tInt64List\x12\x0b\n\x03val\x18\x01 \x03(\x03\"\x19\n\nDoubleList\x12\x0b\n\x03val\x18\x01 \x03(\x01\"\x18\n\tFloatList\x12\x0b\n\x03val\x18\x01 \x03(\x02\"\x17\n\x08\x42oolList\x12\x0b\n\x03val\x18\x01 \x03(\x08\"8\n\rTimestampList\x12\'\n\x03val\x18\x01 \x03(\x0b\x32\x1a.google.protobuf.TimestampBQ\n\x0b\x66\x65\x61st.typesB\nValueProtoZ6github.com/gojek/feast/protos/generated/go/feast/typesb\x06proto3') - , - dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) + serialized_options=_b('\n\013feast.typesB\nValueProtoZ0github.com/gojek/feast/sdk/go/protos/feast/types'), + serialized_pb=_b('\n\x17\x66\x65\x61st/types/Value.proto\x12\x0b\x66\x65\x61st.types\"\xe0\x01\n\tValueType\"\xd2\x01\n\x04\x45num\x12\x0b\n\x07INVALID\x10\x00\x12\t\n\x05\x42YTES\x10\x01\x12\n\n\x06STRING\x10\x02\x12\t\n\x05INT32\x10\x03\x12\t\n\x05INT64\x10\x04\x12\n\n\x06\x44OUBLE\x10\x05\x12\t\n\x05\x46LOAT\x10\x06\x12\x08\n\x04\x42OOL\x10\x07\x12\x0e\n\nBYTES_LIST\x10\x0b\x12\x0f\n\x0bSTRING_LIST\x10\x0c\x12\x0e\n\nINT32_LIST\x10\r\x12\x0e\n\nINT64_LIST\x10\x0e\x12\x0f\n\x0b\x44OUBLE_LIST\x10\x0f\x12\x0e\n\nFLOAT_LIST\x10\x10\x12\r\n\tBOOL_LIST\x10\x11\"\x82\x04\n\x05Value\x12\x13\n\tbytes_val\x18\x01 \x01(\x0cH\x00\x12\x14\n\nstring_val\x18\x02 \x01(\tH\x00\x12\x13\n\tint32_val\x18\x03 \x01(\x05H\x00\x12\x13\n\tint64_val\x18\x04 \x01(\x03H\x00\x12\x14\n\ndouble_val\x18\x05 \x01(\x01H\x00\x12\x13\n\tfloat_val\x18\x06 \x01(\x02H\x00\x12\x12\n\x08\x62ool_val\x18\x07 
\x01(\x08H\x00\x12\x30\n\x0e\x62ytes_list_val\x18\x0b \x01(\x0b\x32\x16.feast.types.BytesListH\x00\x12\x32\n\x0fstring_list_val\x18\x0c \x01(\x0b\x32\x17.feast.types.StringListH\x00\x12\x30\n\x0eint32_list_val\x18\r \x01(\x0b\x32\x16.feast.types.Int32ListH\x00\x12\x30\n\x0eint64_list_val\x18\x0e \x01(\x0b\x32\x16.feast.types.Int64ListH\x00\x12\x32\n\x0f\x64ouble_list_val\x18\x0f \x01(\x0b\x32\x17.feast.types.DoubleListH\x00\x12\x30\n\x0e\x66loat_list_val\x18\x10 \x01(\x0b\x32\x16.feast.types.FloatListH\x00\x12.\n\rbool_list_val\x18\x11 \x01(\x0b\x32\x15.feast.types.BoolListH\x00\x42\x05\n\x03val\"\x18\n\tBytesList\x12\x0b\n\x03val\x18\x01 \x03(\x0c\"\x19\n\nStringList\x12\x0b\n\x03val\x18\x01 \x03(\t\"\x18\n\tInt32List\x12\x0b\n\x03val\x18\x01 \x03(\x05\"\x18\n\tInt64List\x12\x0b\n\x03val\x18\x01 \x03(\x03\"\x19\n\nDoubleList\x12\x0b\n\x03val\x18\x01 \x03(\x01\"\x18\n\tFloatList\x12\x0b\n\x03val\x18\x01 \x03(\x02\"\x17\n\x08\x42oolList\x12\x0b\n\x03val\x18\x01 \x03(\x08\x42K\n\x0b\x66\x65\x61st.typesB\nValueProtoZ0github.com/gojek/feast/sdk/go/protos/feast/typesb\x06proto3') +) @@ -33,7 +32,7 @@ file=DESCRIPTOR, values=[ _descriptor.EnumValueDescriptor( - name='UNKNOWN', index=0, number=0, + name='INVALID', index=0, number=0, serialized_options=None, type=None), _descriptor.EnumValueDescriptor( @@ -65,14 +64,38 @@ serialized_options=None, type=None), _descriptor.EnumValueDescriptor( - name='TIMESTAMP', index=8, number=8, + name='BYTES_LIST', index=8, number=11, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='STRING_LIST', index=9, number=12, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='INT32_LIST', index=10, number=13, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='INT64_LIST', index=11, number=14, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DOUBLE_LIST', index=12, number=15, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FLOAT_LIST', index=13, number=16, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BOOL_LIST', index=14, number=17, serialized_options=None, type=None), ], containing_type=None, serialized_options=None, - serialized_start=86, - serialized_end=198, + serialized_start=55, + serialized_end=265, ) _sym_db.RegisterEnumDescriptor(_VALUETYPE_ENUM) @@ -97,8 +120,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=73, - serialized_end=198, + serialized_start=41, + serialized_end=265, ) @@ -110,140 +133,99 @@ containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='bytesVal', full_name='feast.types.Value.bytesVal', index=0, + name='bytes_val', full_name='feast.types.Value.bytes_val', index=0, number=1, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='stringVal', full_name='feast.types.Value.stringVal', index=1, + name='string_val', full_name='feast.types.Value.string_val', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='int32Val', full_name='feast.types.Value.int32Val', index=2, + name='int32_val', 
full_name='feast.types.Value.int32_val', index=2, number=3, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='int64Val', full_name='feast.types.Value.int64Val', index=3, + name='int64_val', full_name='feast.types.Value.int64_val', index=3, number=4, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='doubleVal', full_name='feast.types.Value.doubleVal', index=4, + name='double_val', full_name='feast.types.Value.double_val', index=4, number=5, type=1, cpp_type=5, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='floatVal', full_name='feast.types.Value.floatVal', index=5, + name='float_val', full_name='feast.types.Value.float_val', index=5, number=6, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='boolVal', full_name='feast.types.Value.boolVal', index=6, + name='bool_val', full_name='feast.types.Value.bool_val', index=6, number=7, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='timestampVal', full_name='feast.types.Value.timestampVal', index=7, - number=8, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='val', full_name='feast.types.Value.val', - index=0, containing_type=None, fields=[]), - ], - serialized_start=201, - serialized_end=408, -) - - -_VALUELIST = _descriptor.Descriptor( - name='ValueList', - full_name='feast.types.ValueList', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='bytesList', full_name='feast.types.ValueList.bytesList', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='stringList', full_name='feast.types.ValueList.stringList', index=1, - number=2, type=11, cpp_type=10, label=1, + name='bytes_list_val', full_name='feast.types.Value.bytes_list_val', index=7, + number=11, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - 
name='int32List', full_name='feast.types.ValueList.int32List', index=2, - number=3, type=11, cpp_type=10, label=1, + name='string_list_val', full_name='feast.types.Value.string_list_val', index=8, + number=12, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='int64List', full_name='feast.types.ValueList.int64List', index=3, - number=4, type=11, cpp_type=10, label=1, + name='int32_list_val', full_name='feast.types.Value.int32_list_val', index=9, + number=13, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='doubleList', full_name='feast.types.ValueList.doubleList', index=4, - number=5, type=11, cpp_type=10, label=1, + name='int64_list_val', full_name='feast.types.Value.int64_list_val', index=10, + number=14, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='floatList', full_name='feast.types.ValueList.floatList', index=5, - number=6, type=11, cpp_type=10, label=1, + name='double_list_val', full_name='feast.types.Value.double_list_val', index=11, + number=15, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='boolList', full_name='feast.types.ValueList.boolList', index=6, - number=7, type=11, cpp_type=10, label=1, + name='float_list_val', full_name='feast.types.Value.float_list_val', index=12, + number=16, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='timestampList', full_name='feast.types.ValueList.timestampList', index=7, - number=8, type=11, cpp_type=10, label=1, + name='bool_list_val', full_name='feast.types.Value.bool_list_val', index=13, + number=17, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, @@ -260,11 +242,11 @@ extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( - name='valueList', full_name='feast.types.ValueList.valueList', + name='val', full_name='feast.types.Value.val', index=0, containing_type=None, fields=[]), ], - serialized_start=411, - serialized_end=805, + serialized_start=268, + serialized_end=782, ) @@ -294,8 +276,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=807, - serialized_end=831, + serialized_start=784, + serialized_end=808, ) @@ -325,8 +307,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=833, - serialized_end=858, + serialized_start=810, + serialized_end=835, ) @@ -356,8 +338,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=860, - serialized_end=884, + serialized_start=837, + serialized_end=861, ) @@ -387,8 +369,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=886, - 
serialized_end=910, + serialized_start=863, + serialized_end=887, ) @@ -418,8 +400,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=912, - serialized_end=937, + serialized_start=889, + serialized_end=914, ) @@ -449,8 +431,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=939, - serialized_end=963, + serialized_start=916, + serialized_end=940, ) @@ -480,103 +462,62 @@ extension_ranges=[], oneofs=[ ], - serialized_start=965, - serialized_end=988, -) - - -_TIMESTAMPLIST = _descriptor.Descriptor( - name='TimestampList', - full_name='feast.types.TimestampList', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='val', full_name='feast.types.TimestampList.val', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=990, - serialized_end=1046, + serialized_start=942, + serialized_end=965, ) _VALUETYPE_ENUM.containing_type = _VALUETYPE -_VALUE.fields_by_name['timestampVal'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_VALUE.fields_by_name['bytes_list_val'].message_type = _BYTESLIST +_VALUE.fields_by_name['string_list_val'].message_type = _STRINGLIST +_VALUE.fields_by_name['int32_list_val'].message_type = _INT32LIST +_VALUE.fields_by_name['int64_list_val'].message_type = _INT64LIST +_VALUE.fields_by_name['double_list_val'].message_type = _DOUBLELIST +_VALUE.fields_by_name['float_list_val'].message_type = _FLOATLIST +_VALUE.fields_by_name['bool_list_val'].message_type = _BOOLLIST +_VALUE.oneofs_by_name['val'].fields.append( + _VALUE.fields_by_name['bytes_val']) +_VALUE.fields_by_name['bytes_val'].containing_oneof = _VALUE.oneofs_by_name['val'] +_VALUE.oneofs_by_name['val'].fields.append( + _VALUE.fields_by_name['string_val']) +_VALUE.fields_by_name['string_val'].containing_oneof = _VALUE.oneofs_by_name['val'] +_VALUE.oneofs_by_name['val'].fields.append( + _VALUE.fields_by_name['int32_val']) +_VALUE.fields_by_name['int32_val'].containing_oneof = _VALUE.oneofs_by_name['val'] _VALUE.oneofs_by_name['val'].fields.append( - _VALUE.fields_by_name['bytesVal']) -_VALUE.fields_by_name['bytesVal'].containing_oneof = _VALUE.oneofs_by_name['val'] + _VALUE.fields_by_name['int64_val']) +_VALUE.fields_by_name['int64_val'].containing_oneof = _VALUE.oneofs_by_name['val'] _VALUE.oneofs_by_name['val'].fields.append( - _VALUE.fields_by_name['stringVal']) -_VALUE.fields_by_name['stringVal'].containing_oneof = _VALUE.oneofs_by_name['val'] + _VALUE.fields_by_name['double_val']) +_VALUE.fields_by_name['double_val'].containing_oneof = _VALUE.oneofs_by_name['val'] _VALUE.oneofs_by_name['val'].fields.append( - _VALUE.fields_by_name['int32Val']) -_VALUE.fields_by_name['int32Val'].containing_oneof = _VALUE.oneofs_by_name['val'] + _VALUE.fields_by_name['float_val']) +_VALUE.fields_by_name['float_val'].containing_oneof = _VALUE.oneofs_by_name['val'] _VALUE.oneofs_by_name['val'].fields.append( - _VALUE.fields_by_name['int64Val']) -_VALUE.fields_by_name['int64Val'].containing_oneof = _VALUE.oneofs_by_name['val'] + _VALUE.fields_by_name['bool_val']) +_VALUE.fields_by_name['bool_val'].containing_oneof = _VALUE.oneofs_by_name['val'] 
_VALUE.oneofs_by_name['val'].fields.append( - _VALUE.fields_by_name['doubleVal']) -_VALUE.fields_by_name['doubleVal'].containing_oneof = _VALUE.oneofs_by_name['val'] + _VALUE.fields_by_name['bytes_list_val']) +_VALUE.fields_by_name['bytes_list_val'].containing_oneof = _VALUE.oneofs_by_name['val'] _VALUE.oneofs_by_name['val'].fields.append( - _VALUE.fields_by_name['floatVal']) -_VALUE.fields_by_name['floatVal'].containing_oneof = _VALUE.oneofs_by_name['val'] + _VALUE.fields_by_name['string_list_val']) +_VALUE.fields_by_name['string_list_val'].containing_oneof = _VALUE.oneofs_by_name['val'] _VALUE.oneofs_by_name['val'].fields.append( - _VALUE.fields_by_name['boolVal']) -_VALUE.fields_by_name['boolVal'].containing_oneof = _VALUE.oneofs_by_name['val'] + _VALUE.fields_by_name['int32_list_val']) +_VALUE.fields_by_name['int32_list_val'].containing_oneof = _VALUE.oneofs_by_name['val'] _VALUE.oneofs_by_name['val'].fields.append( - _VALUE.fields_by_name['timestampVal']) -_VALUE.fields_by_name['timestampVal'].containing_oneof = _VALUE.oneofs_by_name['val'] -_VALUELIST.fields_by_name['bytesList'].message_type = _BYTESLIST -_VALUELIST.fields_by_name['stringList'].message_type = _STRINGLIST -_VALUELIST.fields_by_name['int32List'].message_type = _INT32LIST -_VALUELIST.fields_by_name['int64List'].message_type = _INT64LIST -_VALUELIST.fields_by_name['doubleList'].message_type = _DOUBLELIST -_VALUELIST.fields_by_name['floatList'].message_type = _FLOATLIST -_VALUELIST.fields_by_name['boolList'].message_type = _BOOLLIST -_VALUELIST.fields_by_name['timestampList'].message_type = _TIMESTAMPLIST -_VALUELIST.oneofs_by_name['valueList'].fields.append( - _VALUELIST.fields_by_name['bytesList']) -_VALUELIST.fields_by_name['bytesList'].containing_oneof = _VALUELIST.oneofs_by_name['valueList'] -_VALUELIST.oneofs_by_name['valueList'].fields.append( - _VALUELIST.fields_by_name['stringList']) -_VALUELIST.fields_by_name['stringList'].containing_oneof = _VALUELIST.oneofs_by_name['valueList'] -_VALUELIST.oneofs_by_name['valueList'].fields.append( - _VALUELIST.fields_by_name['int32List']) -_VALUELIST.fields_by_name['int32List'].containing_oneof = _VALUELIST.oneofs_by_name['valueList'] -_VALUELIST.oneofs_by_name['valueList'].fields.append( - _VALUELIST.fields_by_name['int64List']) -_VALUELIST.fields_by_name['int64List'].containing_oneof = _VALUELIST.oneofs_by_name['valueList'] -_VALUELIST.oneofs_by_name['valueList'].fields.append( - _VALUELIST.fields_by_name['doubleList']) -_VALUELIST.fields_by_name['doubleList'].containing_oneof = _VALUELIST.oneofs_by_name['valueList'] -_VALUELIST.oneofs_by_name['valueList'].fields.append( - _VALUELIST.fields_by_name['floatList']) -_VALUELIST.fields_by_name['floatList'].containing_oneof = _VALUELIST.oneofs_by_name['valueList'] -_VALUELIST.oneofs_by_name['valueList'].fields.append( - _VALUELIST.fields_by_name['boolList']) -_VALUELIST.fields_by_name['boolList'].containing_oneof = _VALUELIST.oneofs_by_name['valueList'] -_VALUELIST.oneofs_by_name['valueList'].fields.append( - _VALUELIST.fields_by_name['timestampList']) -_VALUELIST.fields_by_name['timestampList'].containing_oneof = _VALUELIST.oneofs_by_name['valueList'] -_TIMESTAMPLIST.fields_by_name['val'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP + _VALUE.fields_by_name['int64_list_val']) +_VALUE.fields_by_name['int64_list_val'].containing_oneof = _VALUE.oneofs_by_name['val'] +_VALUE.oneofs_by_name['val'].fields.append( + _VALUE.fields_by_name['double_list_val']) +_VALUE.fields_by_name['double_list_val'].containing_oneof = 
_VALUE.oneofs_by_name['val'] +_VALUE.oneofs_by_name['val'].fields.append( + _VALUE.fields_by_name['float_list_val']) +_VALUE.fields_by_name['float_list_val'].containing_oneof = _VALUE.oneofs_by_name['val'] +_VALUE.oneofs_by_name['val'].fields.append( + _VALUE.fields_by_name['bool_list_val']) +_VALUE.fields_by_name['bool_list_val'].containing_oneof = _VALUE.oneofs_by_name['val'] DESCRIPTOR.message_types_by_name['ValueType'] = _VALUETYPE DESCRIPTOR.message_types_by_name['Value'] = _VALUE -DESCRIPTOR.message_types_by_name['ValueList'] = _VALUELIST DESCRIPTOR.message_types_by_name['BytesList'] = _BYTESLIST DESCRIPTOR.message_types_by_name['StringList'] = _STRINGLIST DESCRIPTOR.message_types_by_name['Int32List'] = _INT32LIST @@ -584,86 +525,71 @@ DESCRIPTOR.message_types_by_name['DoubleList'] = _DOUBLELIST DESCRIPTOR.message_types_by_name['FloatList'] = _FLOATLIST DESCRIPTOR.message_types_by_name['BoolList'] = _BOOLLIST -DESCRIPTOR.message_types_by_name['TimestampList'] = _TIMESTAMPLIST _sym_db.RegisterFileDescriptor(DESCRIPTOR) -ValueType = _reflection.GeneratedProtocolMessageType('ValueType', (_message.Message,), dict( - DESCRIPTOR = _VALUETYPE, - __module__ = 'feast.types.Value_pb2' +ValueType = _reflection.GeneratedProtocolMessageType('ValueType', (_message.Message,), { + 'DESCRIPTOR' : _VALUETYPE, + '__module__' : 'feast.types.Value_pb2' # @@protoc_insertion_point(class_scope:feast.types.ValueType) - )) + }) _sym_db.RegisterMessage(ValueType) -Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), dict( - DESCRIPTOR = _VALUE, - __module__ = 'feast.types.Value_pb2' +Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), { + 'DESCRIPTOR' : _VALUE, + '__module__' : 'feast.types.Value_pb2' # @@protoc_insertion_point(class_scope:feast.types.Value) - )) + }) _sym_db.RegisterMessage(Value) -ValueList = _reflection.GeneratedProtocolMessageType('ValueList', (_message.Message,), dict( - DESCRIPTOR = _VALUELIST, - __module__ = 'feast.types.Value_pb2' - # @@protoc_insertion_point(class_scope:feast.types.ValueList) - )) -_sym_db.RegisterMessage(ValueList) - -BytesList = _reflection.GeneratedProtocolMessageType('BytesList', (_message.Message,), dict( - DESCRIPTOR = _BYTESLIST, - __module__ = 'feast.types.Value_pb2' +BytesList = _reflection.GeneratedProtocolMessageType('BytesList', (_message.Message,), { + 'DESCRIPTOR' : _BYTESLIST, + '__module__' : 'feast.types.Value_pb2' # @@protoc_insertion_point(class_scope:feast.types.BytesList) - )) + }) _sym_db.RegisterMessage(BytesList) -StringList = _reflection.GeneratedProtocolMessageType('StringList', (_message.Message,), dict( - DESCRIPTOR = _STRINGLIST, - __module__ = 'feast.types.Value_pb2' +StringList = _reflection.GeneratedProtocolMessageType('StringList', (_message.Message,), { + 'DESCRIPTOR' : _STRINGLIST, + '__module__' : 'feast.types.Value_pb2' # @@protoc_insertion_point(class_scope:feast.types.StringList) - )) + }) _sym_db.RegisterMessage(StringList) -Int32List = _reflection.GeneratedProtocolMessageType('Int32List', (_message.Message,), dict( - DESCRIPTOR = _INT32LIST, - __module__ = 'feast.types.Value_pb2' +Int32List = _reflection.GeneratedProtocolMessageType('Int32List', (_message.Message,), { + 'DESCRIPTOR' : _INT32LIST, + '__module__' : 'feast.types.Value_pb2' # @@protoc_insertion_point(class_scope:feast.types.Int32List) - )) + }) _sym_db.RegisterMessage(Int32List) -Int64List = _reflection.GeneratedProtocolMessageType('Int64List', (_message.Message,), dict( - DESCRIPTOR = _INT64LIST, - __module__ = 
'feast.types.Value_pb2' +Int64List = _reflection.GeneratedProtocolMessageType('Int64List', (_message.Message,), { + 'DESCRIPTOR' : _INT64LIST, + '__module__' : 'feast.types.Value_pb2' # @@protoc_insertion_point(class_scope:feast.types.Int64List) - )) + }) _sym_db.RegisterMessage(Int64List) -DoubleList = _reflection.GeneratedProtocolMessageType('DoubleList', (_message.Message,), dict( - DESCRIPTOR = _DOUBLELIST, - __module__ = 'feast.types.Value_pb2' +DoubleList = _reflection.GeneratedProtocolMessageType('DoubleList', (_message.Message,), { + 'DESCRIPTOR' : _DOUBLELIST, + '__module__' : 'feast.types.Value_pb2' # @@protoc_insertion_point(class_scope:feast.types.DoubleList) - )) + }) _sym_db.RegisterMessage(DoubleList) -FloatList = _reflection.GeneratedProtocolMessageType('FloatList', (_message.Message,), dict( - DESCRIPTOR = _FLOATLIST, - __module__ = 'feast.types.Value_pb2' +FloatList = _reflection.GeneratedProtocolMessageType('FloatList', (_message.Message,), { + 'DESCRIPTOR' : _FLOATLIST, + '__module__' : 'feast.types.Value_pb2' # @@protoc_insertion_point(class_scope:feast.types.FloatList) - )) + }) _sym_db.RegisterMessage(FloatList) -BoolList = _reflection.GeneratedProtocolMessageType('BoolList', (_message.Message,), dict( - DESCRIPTOR = _BOOLLIST, - __module__ = 'feast.types.Value_pb2' +BoolList = _reflection.GeneratedProtocolMessageType('BoolList', (_message.Message,), { + 'DESCRIPTOR' : _BOOLLIST, + '__module__' : 'feast.types.Value_pb2' # @@protoc_insertion_point(class_scope:feast.types.BoolList) - )) + }) _sym_db.RegisterMessage(BoolList) -TimestampList = _reflection.GeneratedProtocolMessageType('TimestampList', (_message.Message,), dict( - DESCRIPTOR = _TIMESTAMPLIST, - __module__ = 'feast.types.Value_pb2' - # @@protoc_insertion_point(class_scope:feast.types.TimestampList) - )) -_sym_db.RegisterMessage(TimestampList) - DESCRIPTOR._options = None # @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/types/Value_pb2.pyi b/sdk/python/feast/types/Value_pb2.pyi new file mode 100644 index 00000000000..d8b8a73dd36 --- /dev/null +++ b/sdk/python/feast/types/Value_pb2.pyi @@ -0,0 +1,260 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, + EnumDescriptor as google___protobuf___descriptor___EnumDescriptor, +) + +from google.protobuf.internal.containers import ( + RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from typing import ( + Iterable as typing___Iterable, + List as typing___List, + Optional as typing___Optional, + Text as typing___Text, + Tuple as typing___Tuple, + cast as typing___cast, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class ValueType(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + class Enum(int): + DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... + @classmethod + def Name(cls, number: int) -> str: ... + @classmethod + def Value(cls, name: str) -> ValueType.Enum: ... + @classmethod + def keys(cls) -> typing___List[str]: ... + @classmethod + def values(cls) -> typing___List[ValueType.Enum]: ... + @classmethod + def items(cls) -> typing___List[typing___Tuple[str, ValueType.Enum]]: ... 
+ INVALID = typing___cast(ValueType.Enum, 0) + BYTES = typing___cast(ValueType.Enum, 1) + STRING = typing___cast(ValueType.Enum, 2) + INT32 = typing___cast(ValueType.Enum, 3) + INT64 = typing___cast(ValueType.Enum, 4) + DOUBLE = typing___cast(ValueType.Enum, 5) + FLOAT = typing___cast(ValueType.Enum, 6) + BOOL = typing___cast(ValueType.Enum, 7) + BYTES_LIST = typing___cast(ValueType.Enum, 11) + STRING_LIST = typing___cast(ValueType.Enum, 12) + INT32_LIST = typing___cast(ValueType.Enum, 13) + INT64_LIST = typing___cast(ValueType.Enum, 14) + DOUBLE_LIST = typing___cast(ValueType.Enum, 15) + FLOAT_LIST = typing___cast(ValueType.Enum, 16) + BOOL_LIST = typing___cast(ValueType.Enum, 17) + INVALID = typing___cast(ValueType.Enum, 0) + BYTES = typing___cast(ValueType.Enum, 1) + STRING = typing___cast(ValueType.Enum, 2) + INT32 = typing___cast(ValueType.Enum, 3) + INT64 = typing___cast(ValueType.Enum, 4) + DOUBLE = typing___cast(ValueType.Enum, 5) + FLOAT = typing___cast(ValueType.Enum, 6) + BOOL = typing___cast(ValueType.Enum, 7) + BYTES_LIST = typing___cast(ValueType.Enum, 11) + STRING_LIST = typing___cast(ValueType.Enum, 12) + INT32_LIST = typing___cast(ValueType.Enum, 13) + INT64_LIST = typing___cast(ValueType.Enum, 14) + DOUBLE_LIST = typing___cast(ValueType.Enum, 15) + FLOAT_LIST = typing___cast(ValueType.Enum, 16) + BOOL_LIST = typing___cast(ValueType.Enum, 17) + + + def __init__(self, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> ValueType: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + +class Value(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + bytes_val = ... # type: bytes + string_val = ... # type: typing___Text + int32_val = ... # type: int + int64_val = ... # type: int + double_val = ... # type: float + float_val = ... # type: float + bool_val = ... # type: bool + + @property + def bytes_list_val(self) -> BytesList: ... + + @property + def string_list_val(self) -> StringList: ... + + @property + def int32_list_val(self) -> Int32List: ... + + @property + def int64_list_val(self) -> Int64List: ... + + @property + def double_list_val(self) -> DoubleList: ... + + @property + def float_list_val(self) -> FloatList: ... + + @property + def bool_list_val(self) -> BoolList: ... + + def __init__(self, + *, + bytes_val : typing___Optional[bytes] = None, + string_val : typing___Optional[typing___Text] = None, + int32_val : typing___Optional[int] = None, + int64_val : typing___Optional[int] = None, + double_val : typing___Optional[float] = None, + float_val : typing___Optional[float] = None, + bool_val : typing___Optional[bool] = None, + bytes_list_val : typing___Optional[BytesList] = None, + string_list_val : typing___Optional[StringList] = None, + int32_list_val : typing___Optional[Int32List] = None, + int64_list_val : typing___Optional[Int64List] = None, + double_list_val : typing___Optional[DoubleList] = None, + float_list_val : typing___Optional[FloatList] = None, + bool_list_val : typing___Optional[BoolList] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Value: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
+ if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"bool_list_val",u"bool_val",u"bytes_list_val",u"bytes_val",u"double_list_val",u"double_val",u"float_list_val",u"float_val",u"int32_list_val",u"int32_val",u"int64_list_val",u"int64_val",u"string_list_val",u"string_val",u"val"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"bool_list_val",u"bool_val",u"bytes_list_val",u"bytes_val",u"double_list_val",u"double_val",u"float_list_val",u"float_val",u"int32_list_val",u"int32_val",u"int64_list_val",u"int64_val",u"string_list_val",u"string_val",u"val"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"bool_list_val",b"bool_list_val",u"bool_val",b"bool_val",u"bytes_list_val",b"bytes_list_val",u"bytes_val",b"bytes_val",u"double_list_val",b"double_list_val",u"double_val",b"double_val",u"float_list_val",b"float_list_val",u"float_val",b"float_val",u"int32_list_val",b"int32_list_val",u"int32_val",b"int32_val",u"int64_list_val",b"int64_list_val",u"int64_val",b"int64_val",u"string_list_val",b"string_list_val",u"string_val",b"string_val",u"val",b"val"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"bool_list_val",b"bool_list_val",u"bool_val",b"bool_val",u"bytes_list_val",b"bytes_list_val",u"bytes_val",b"bytes_val",u"double_list_val",b"double_list_val",u"double_val",b"double_val",u"float_list_val",b"float_list_val",u"float_val",b"float_val",u"int32_list_val",b"int32_list_val",u"int32_val",b"int32_val",u"int64_list_val",b"int64_list_val",u"int64_val",b"int64_val",u"string_list_val",b"string_list_val",u"string_val",b"string_val",u"val",b"val"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions___Literal[u"val",b"val"]) -> typing_extensions___Literal["bytes_val","string_val","int32_val","int64_val","double_val","float_val","bool_val","bytes_list_val","string_list_val","int32_list_val","int64_list_val","double_list_val","float_list_val","bool_list_val"]: ... + +class BytesList(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes] + + def __init__(self, + *, + val : typing___Optional[typing___Iterable[bytes]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> BytesList: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"val",b"val"]) -> None: ... + +class StringList(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] + + def __init__(self, + *, + val : typing___Optional[typing___Iterable[typing___Text]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> StringList: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... 
+ else: + def ClearField(self, field_name: typing_extensions___Literal[u"val",b"val"]) -> None: ... + +class Int32List(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[int] + + def __init__(self, + *, + val : typing___Optional[typing___Iterable[int]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Int32List: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"val",b"val"]) -> None: ... + +class Int64List(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[int] + + def __init__(self, + *, + val : typing___Optional[typing___Iterable[int]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Int64List: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"val",b"val"]) -> None: ... + +class DoubleList(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[float] + + def __init__(self, + *, + val : typing___Optional[typing___Iterable[float]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> DoubleList: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"val",b"val"]) -> None: ... + +class FloatList(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[float] + + def __init__(self, + *, + val : typing___Optional[typing___Iterable[float]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> FloatList: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"val",b"val"]) -> None: ... + +class BoolList(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bool] + + def __init__(self, + *, + val : typing___Optional[typing___Iterable[bool]] = None, + ) -> None: ... 
+ @classmethod + def FromString(cls, s: bytes) -> BoolList: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"val",b"val"]) -> None: ... diff --git a/sdk/python/tests/sdk/utils/test_gs_utils.py b/sdk/python/feast/value_type.py similarity index 59% rename from sdk/python/tests/sdk/utils/test_gs_utils.py rename to sdk/python/feast/value_type.py index 5e5a2e13bd1..5e77bd8f456 100644 --- a/sdk/python/tests/sdk/utils/test_gs_utils.py +++ b/sdk/python/feast/value_type.py @@ -1,4 +1,4 @@ -# Copyright 2018 The Feast Authors +# Copyright 2019 The Feast Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,10 +12,26 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pytest -from feast.sdk.utils.gs_utils import is_gs_path +import enum -def test_is_gs_path(): - assert is_gs_path("gs://valid/gs/file.csv") == True - assert is_gs_path("local/path/file.csv") == False +class ValueType(enum.Enum): + """ + Feature value type + """ + + UNKNOWN = 0 + BYTES = 1 + STRING = 2 + INT32 = 3 + INT64 = 4 + DOUBLE = 5 + FLOAT = 6 + BOOL = 7 + BYTES_LIST = 11 + STRING_LIST = 12 + INT32_LIST = 13 + INT64_LIST = 14 + DOUBLE_LIST = 15 + FLOAT_LIST = 16 + BOOL_LIST = 17 diff --git a/sdk/python/feast/version.py b/sdk/python/feast/version.py deleted file mode 100644 index 3b548742844..00000000000 --- a/sdk/python/feast/version.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -VERSION = '0.1.0.post0' \ No newline at end of file diff --git a/sdk/python/requirements-test.txt b/sdk/python/requirements-test.txt index b97a8770cd5..29f42ebd66b 100644 --- a/sdk/python/requirements-test.txt +++ b/sdk/python/requirements-test.txt @@ -1,14 +1,19 @@ -google-api-core>=1.7.0 -google-auth>=1.6.0 -google-cloud-bigquery>=1.8.0 -google-cloud-storage>=1.13.0 +google-api-core==1.* +google-auth==1.* +google-cloud-bigquery==1.* +google-cloud-bigquery-storage==0.* +google-cloud-storage==1.* google-resumable-media==0.3.1 -googleapis-common-protos>=1.5.5 -grpcio>=1.16.1 +googleapis-common-protos==1.* +grpcio==1.* numpy -pandas>=0.24.0 -protobuf>=3.0.0 +mock==2.0.0 +pandas==0.* +protobuf==3.* pytest pytest-mock -PyYAML -fastavro>=0.21.23 \ No newline at end of file +pytest-timeout +PyYAML==5.1.2 +fastavro==0.21.* +grpcio-testing==1.* +pytest-ordering==0.6.* \ No newline at end of file diff --git a/sdk/python/setup.py b/sdk/python/setup.py index a8b0c7cdc81..f7ec4b1e511 100644 --- a/sdk/python/setup.py +++ b/sdk/python/setup.py @@ -1,4 +1,4 @@ -# Copyright 2018 The Feast Authors +# Copyright 2019 The Feast Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,25 +14,37 @@ import imp import os -from setuptools import find_packages, setup, Command + +from setuptools import find_packages, setup NAME = "feast" DESCRIPTION = "Python sdk for Feast" URL = "https://github.com/gojek/feast" AUTHOR = "Feast" REQUIRES_PYTHON = ">=3.6.0" -VERSION = imp.load_source("feast.version", os.path.join("feast", "version.py")).VERSION +VERSION = "0.3.0" + REQUIRED = [ - "google-api-core>=1.7.0", - "google-auth>=1.6.0", - "google-cloud-bigquery>=1.8.0", - "google-cloud-storage>=1.13.0", - "googleapis-common-protos>=1.5.5", - "grpcio>=1.16.1", - "pandas>=0.24.0", - "protobuf>=3.0.0", - "PyYAML", - "fastavro>=0.21.19" + "click>=7.0", + "google-api-core==1.*", + "google-auth==1.*", + "google-cloud-bigquery==1.*", + "google-cloud-storage==1.20.*", + "google-cloud-core==1.0.3", + "googleapis-common-protos==1.*", + "google-cloud-bigquery-storage==0.*", + "grpcio==1.*", + "pandas==0.*", + "pandavro==1.5.1", + "protobuf==3.10.*", + "PyYAML==5.1.2", + "fastavro==0.*", + "kafka-python==1.4.*", + "tabulate==0.8.*", + "toml==0.10.0", + "tqdm==4.*", + "numpy", + "google", ] setup( @@ -44,6 +56,9 @@ url=URL, packages=find_packages(exclude=("tests",)), install_requires=REQUIRED, + # https://stackoverflow.com/questions/28509965/setuptools-development-requirements + # Install dev requirements with: pip install -e .[dev] + extras_require={"dev": ["mypy-protobuf==1.*", "grpcio-testing==1.*"]}, include_package_data=True, license="Apache", classifiers=[ @@ -54,4 +69,5 @@ "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", ], + entry_points={"console_scripts": ["feast=cli:cli"]}, ) diff --git a/sdk/python/tests/data/driver_features.csv b/sdk/python/tests/data/driver_features.csv index 03b54a8a39a..14f1bb57741 100644 --- a/sdk/python/tests/data/driver_features.csv +++ b/sdk/python/tests/data/driver_features.csv @@ -1,11 +1,11 @@ -driver_id,ts,completed,avg_distance_completed,avg_customer_distance_completed,avg_distance_cancelled -1,2018-09-25T00:00:00.000,12,1.102,172,-1 -2,2018-09-25T00:00:00.000,23,8.16,783,15.619 -3,2018-09-25T00:00:00.000,14,2.83328571428571,138.142857142857,18.5935 -4,2018-09-25T00:00:00.000,7,4.593,1575,-1 -5,2018-09-25T00:00:00.000,15,11.7656,314,6.733 
-6,2018-09-25T00:00:00.000,7,14.2352142857143,276.785714285714,3.1645 -7,2018-09-25T00:00:00.000,10,5.0266,421.1,-1 -8,2018-09-25T00:00:00.000,6,23.554,4639,-1 -9,2018-09-25T00:00:00.000,14,7.42042857142857,149.888888888889,1.371 -10,2018-09-25T00:00:00.000,4,-1,-1,17.088 \ No newline at end of file +drivers,datetime,completed,avg_distance_completed,avg_customer_distance_completed,avg_distance_cancelled +1,1567328272,12,1.102,172,-1 +2,1567328873,23,8.16,783,15.619 +3,1567328874,14,2.83328571428571,138.142857142857,18.5935 +4,1567328875,7,4.593,1575,-1 +5,1567328876,15,11.7656,314,6.733 +6,1567328877,7,14.2352142857143,276.785714285714,3.1645 +7,1567328878,10,5.0266,421.1,-1 +8,1567328879,6,23.554,4639,-1 +9,1567328880,14,7.42042857142857,149.888888888889,1.371 +10,156733881,4,-1,-1,17.088 \ No newline at end of file diff --git a/sdk/python/tests/dataframes.py b/sdk/python/tests/dataframes.py new file mode 100644 index 00000000000..35477a7302f --- /dev/null +++ b/sdk/python/tests/dataframes.py @@ -0,0 +1,125 @@ +import pandas as pd +import pytz +import numpy as np +from datetime import datetime + + +GOOD = pd.DataFrame( + { + "datetime": [datetime.utcnow().replace(tzinfo=pytz.utc) for _ in range(3)], + "entity_id": [1001, 1002, 1004], + "feature_1": [0.2, 0.4, 0.5], + "feature_2": ["string1", "string2", "string3"], + "feature_3": [1, 2, 5], + } +) + +GOOD_FIVE_FEATURES = pd.DataFrame( + { + "datetime": [datetime.utcnow().replace(tzinfo=pytz.utc) for _ in range(3)], + "entity_id": [1001, 1002, 1004], + "feature_1": [0.2, 0.4, 0.5], + "feature_2": ["string1", "string2", "string3"], + "feature_3": [1, 2, 5], + "feature_4": [1, 2, 5], + "feature_5": [1, 2, 5], + } +) + +GOOD_FIVE_FEATURES_TWO_ENTITIES = pd.DataFrame( + { + "datetime": [datetime.utcnow().replace(tzinfo=pytz.utc) for _ in range(3)], + "entity_1_id": [1001, 1002, 1004], + "entity_2_id": ["1001", "1002", "1003"], + "feature_1": [0.2, 0.4, 0.5], + "feature_2": ["string1", "string2", "string3"], + "feature_3": [1, 2, 5], + "feature_4": [1, 2, 5], + "feature_5": [1.3, 1.3, 1.3], + } +) + + +BAD_NO_ENTITY = pd.DataFrame( + { + "datetime": [datetime.utcnow().replace(tzinfo=pytz.utc) for _ in range(3)], + "feature_1": [0.2, 0.4, 0.5], + "feature_2": [0.3, 0.3, 0.34], + "feature_3": [1, 2, 5], + } +) + +NO_FEATURES = pd.DataFrame( + { + "datetime": [datetime.utcnow().replace(tzinfo=pytz.utc) for _ in range(3)], + "entity_id": [1001, 1002, 1004], + } +) + +BAD_NO_DATETIME = pd.DataFrame( + { + "entity_id": [1001, 1002, 1004], + "feature_1": [0.2, 0.4, 0.5], + "feature_2": [0.3, 0.3, 0.34], + "feature_3": [1, 2, 5], + } +) + +BAD_INCORRECT_DATETIME_TYPE = pd.DataFrame( + { + "datetime": [1.23, 3.23, 1.23], + "entity_id": [1001, 1002, 1004], + "feature_1": [0.2, 0.4, 0.5], + "feature_2": [0.3, 0.3, 0.34], + "feature_3": [1, 2, 5], + } +) + +ALL_TYPES = pd.DataFrame( + { + "datetime": [datetime.utcnow().replace(tzinfo=pytz.utc) for _ in range(3)], + "user_id": [1001, 1002, 1004], + "int32_feature": [np.int32(1), np.int32(2), np.int32(3)], + "int64_feature": [np.int64(1), np.int64(2), np.int64(3)], + "float_feature": [np.float(0.1), np.float(0.2), np.float(0.3)], + "double_feature": [np.float64(0.1), np.float64(0.2), np.float64(0.3)], + "string_feature": ["one", "two", "three"], + "bytes_feature": [b"one", b"two", b"three"], + "bool_feature": [True, False, False], + "int32_list_feature": [ + np.array([1, 2, 3, 4], dtype=np.int32), + np.array([1, 2, 3, 4], dtype=np.int32), + np.array([1, 2, 3, 4], dtype=np.int32), + ], + "int64_list_feature": 
[ + np.array([1, 2, 3, 4], dtype=np.int64), + np.array([1, 2, 3, 4], dtype=np.int64), + np.array([1, 2, 3, 4], dtype=np.int64), + ], + "float_list_feature": [ + np.array([1.1, 1.2, 1.3, 1.4], dtype=np.float32), + np.array([1.1, 1.2, 1.3, 1.4], dtype=np.float32), + np.array([1.1, 1.2, 1.3, 1.4], dtype=np.float32), + ], + "double_list_feature": [ + np.array([1.1, 1.2, 1.3, 1.4], dtype=np.float64), + np.array([1.1, 1.2, 1.3, 1.4], dtype=np.float64), + np.array([1.1, 1.2, 1.3, 1.4], dtype=np.float64), + ], + "string_list_feature": [ + np.array(["one", "two", "three"]), + np.array(["one", "two", "three"]), + np.array(["one", "two", "three"]), + ], + "bytes_list_feature": [ + np.array([b"one", b"two", b"three"]), + np.array([b"one", b"two", b"three"]), + np.array([b"one", b"two", b"three"]), + ], + "bool_list_feature": [ + np.array([True, False, True]), + np.array([True, False, True]), + np.array([True, False, True]), + ], + } +) diff --git a/sdk/python/tests/fake_kafka.py b/sdk/python/tests/fake_kafka.py new file mode 100644 index 00000000000..c511ed1d278 --- /dev/null +++ b/sdk/python/tests/fake_kafka.py @@ -0,0 +1,21 @@ +import queue +from typing import Dict + + +class FakeKafka: + def __init__(self): + self._messages = dict() # type: Dict[str, queue.Queue] + + def send(self, topic, message): + if topic not in self._messages: + self._messages[topic] = queue.Queue() + self._messages[topic].queue.append(message) + + def get(self, topic: str): + message = None + if self._messages[topic]: + message = self._messages[topic].get(block=False) + return message + + def flush(self, timeout): + return True diff --git a/sdk/python/tests/feast_core_server.py b/sdk/python/tests/feast_core_server.py new file mode 100644 index 00000000000..45ffc0a0a9e --- /dev/null +++ b/sdk/python/tests/feast_core_server.py @@ -0,0 +1,94 @@ +from concurrent import futures +import time +import logging +import grpc +import feast.core.CoreService_pb2_grpc as Core +from feast.core.CoreService_pb2 import ( + GetFeastCoreVersionResponse, + ApplyFeatureSetResponse, + ApplyFeatureSetRequest, + ListFeatureSetsResponse, + ListFeatureSetsRequest, +) +from feast.core.FeatureSet_pb2 import FeatureSetSpec as FeatureSetSpec +from feast.core.Source_pb2 import ( + SourceType as SourceTypeProto, + KafkaSourceConfig as KafkaSourceConfigProto, +) +from typing import List + +_logger = logging.getLogger(__name__) + +_ONE_DAY_IN_SECONDS = 60 * 60 * 24 + + +class CoreServicer(Core.CoreServiceServicer): + def __init__(self): + self._feature_sets = dict() + + def GetFeastCoreVersion(self, request, context): + return GetFeastCoreVersionResponse(version="0.3.0") + + def ListFeatureSets(self, request: ListFeatureSetsRequest, context): + + filtered_feature_set_response = [ + fs + for fs in list(self._feature_sets.values()) + if ( + not request.filter.feature_set_name + or fs.name == request.filter.feature_set_name + ) + and ( + not request.filter.feature_set_version + or str(fs.version) == request.filter.feature_set_version + ) + ] + + return ListFeatureSetsResponse(feature_sets=filtered_feature_set_response) + + def ApplyFeatureSet(self, request: ApplyFeatureSetRequest, context): + feature_set = request.feature_set + + if feature_set.version is None: + feature_set.version = 1 + else: + feature_set.version = feature_set.version + 1 + + if feature_set.source.type == SourceTypeProto.INVALID: + feature_set.source.kafka_source_config.CopyFrom( + KafkaSourceConfigProto(bootstrap_servers="server.com", topic="topic1") + ) + feature_set.source.type = 
SourceTypeProto.KAFKA + + self._feature_sets[feature_set.name] = feature_set + + _logger.info( + "registered feature set " + + feature_set.name + + " with " + + str(len(feature_set.entities)) + + " entities and " + + str(len(feature_set.features)) + + " features" + ) + + return ApplyFeatureSetResponse( + feature_set=feature_set, status=ApplyFeatureSetResponse.Status.CREATED + ) + + +def serve(): + server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) + Core.add_CoreServiceServicer_to_server(CoreServicer(), server) + server.add_insecure_port("[::]:50051") + server.start() + try: + while True: + time.sleep(_ONE_DAY_IN_SECONDS) + except KeyboardInterrupt: + server.stop(0) + + +if __name__ == "__main__": + logging.basicConfig() + serve() diff --git a/sdk/python/tests/feast_serving_server.py b/sdk/python/tests/feast_serving_server.py new file mode 100644 index 00000000000..bf40e7edaf4 --- /dev/null +++ b/sdk/python/tests/feast_serving_server.py @@ -0,0 +1,182 @@ +from concurrent import futures +import time +import logging + +import grpc +import threading +import feast.serving.ServingService_pb2_grpc as Serving +from feast.serving.ServingService_pb2 import ( + GetBatchFeaturesResponse, + GetOnlineFeaturesRequest, + GetOnlineFeaturesResponse, + GetFeastServingInfoResponse, +) +import fake_kafka +from typing import Dict +import sqlite3 +from feast.core.CoreService_pb2_grpc import CoreServiceStub +from feast.core.CoreService_pb2 import ( + ListFeatureSetsResponse, + ListStoresRequest, + ListStoresResponse, +) +from feast.core import FeatureSet_pb2 as FeatureSetProto +import stores +from feast.types import ( + FeatureRow_pb2 as FeatureRowProto, + Field_pb2 as FieldProto, + Value_pb2 as ValueProto, +) +from google.protobuf.timestamp_pb2 import Timestamp + +_ONE_DAY_IN_SECONDS = 60 * 60 * 24 + + +class ServingServicer(Serving.ServingServiceServicer): + def __init__(self, kafka: fake_kafka = None, core_url: str = None): + if kafka and core_url: + self.__core_channel = None + self.__connect_core(core_url) + self._feature_sets = ( + dict() + ) # type: Dict[str, FeatureSetProto.FeatureSetSpec] + self._kafka = kafka + self._store = stores.SQLiteDatabase() + + thread = threading.Thread(target=self.__consume, args=()) + thread.daemon = True + thread.start() + + def __connect_core(self, core_url: str): + if not core_url: + raise ValueError("Please set Feast Core URL.") + + if self.__core_channel is None: + self.__core_channel = grpc.insecure_channel(core_url) + + try: + grpc.channel_ready_future(self.__core_channel).result(timeout=5) + except grpc.FutureTimeoutError: + raise ConnectionError( + "connection timed out while attempting to connect to Feast Core gRPC server " + + core_url + ) + else: + self._core_service_stub = CoreServiceStub(self.__core_channel) + + def __get_feature_sets_from_core(self): + # Get updated list of feature sets + feature_sets = ( + self._core_service_stub.ListFeatureSets + ) # type: ListFeatureSetsResponse + + # Store each feature set locally + for feature_set in list(feature_sets.feature_sets): + self._feature_sets[feature_set.name] = feature_set + + def __consume(self): + """ + Consume message in the background from Fake Kafka + """ + while True: + self.__get_feature_sets_from_core() + self.__register_feature_sets_with_store() + for feature_set in list(self._feature_sets.values()): + message = self._kafka.get(feature_set.source.kafka_source_config.topic) + if message is None: + break + self._store.upsert_feature_row(feature_set, message) + time.sleep(1) + + def 
__register_feature_sets_with_store(self): + for feature_set in list(self._feature_sets.values()): + self._store.register_feature_set(feature_set) + + def GetFeastServingVersion(self, request, context): + return GetFeastServingInfoResponse(version="0.3.0") + + def GetOnlineFeatures(self, request: GetOnlineFeaturesRequest, context): + + response = GetOnlineFeaturesResponse( + feature_data_sets=[ + GetOnlineFeaturesResponse.FeatureDataSet( + name="feature_set_1", + version="1", + feature_rows=[ + FeatureRowProto.FeatureRow( + feature_set="feature_set_1", + event_timestamp=Timestamp(), + fields=[ + FieldProto.Field( + name="feature_1", + value=ValueProto.Value(float_val=1.2), + ), + FieldProto.Field( + name="feature_2", + value=ValueProto.Value(float_val=1.2), + ), + FieldProto.Field( + name="feature_3", + value=ValueProto.Value(float_val=1.2), + ), + ], + ), + FeatureRowProto.FeatureRow( + feature_set="feature_set_1", + event_timestamp=Timestamp(), + fields=[ + FieldProto.Field( + name="feature_1", + value=ValueProto.Value(float_val=1.2), + ), + FieldProto.Field( + name="feature_2", + value=ValueProto.Value(float_val=1.2), + ), + FieldProto.Field( + name="feature_3", + value=ValueProto.Value(float_val=1.2), + ), + ], + ), + FeatureRowProto.FeatureRow( + feature_set="feature_set_1", + event_timestamp=Timestamp(), + fields=[ + FieldProto.Field( + name="feature_1", + value=ValueProto.Value(float_val=1.2), + ), + FieldProto.Field( + name="feature_2", + value=ValueProto.Value(float_val=1.2), + ), + FieldProto.Field( + name="feature_3", + value=ValueProto.Value(float_val=1.2), + ), + ], + ), + ], + ) + ] + ) + + return response + + +def serve(): + server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) + Serving.add_ServingServiceServicer_to_server(ServingServicer(), server) + server.add_insecure_port("[::]:50052") + server.start() + try: + while True: + time.sleep(_ONE_DAY_IN_SECONDS) + except KeyboardInterrupt: + server.stop(0) + + +if __name__ == "__main__": + logging.basicConfig() + serve() diff --git a/sdk/python/tests/sample/valid_entity.yaml b/sdk/python/tests/sample/valid_entity.yaml deleted file mode 100644 index 97f215711f0..00000000000 --- a/sdk/python/tests/sample/valid_entity.yaml +++ /dev/null @@ -1,5 +0,0 @@ -name : myentity -description: test entity with tag -tags: - - tag1 - - tag2 \ No newline at end of file diff --git a/sdk/python/tests/sample/valid_entity_no_tag.yaml b/sdk/python/tests/sample/valid_entity_no_tag.yaml deleted file mode 100644 index 828a768b291..00000000000 --- a/sdk/python/tests/sample/valid_entity_no_tag.yaml +++ /dev/null @@ -1,2 +0,0 @@ -name : myentity -description: test entity without tag \ No newline at end of file diff --git a/sdk/python/tests/sample/valid_feature.yaml b/sdk/python/tests/sample/valid_feature.yaml deleted file mode 100644 index a54915641bc..00000000000 --- a/sdk/python/tests/sample/valid_feature.yaml +++ /dev/null @@ -1,12 +0,0 @@ -id: myentity.feature_bool_redis1 -name: feature_bool_redis1 -entity: myentity -owner: bob@example.com -description: test entity. 
-valueType: BOOL -uri: https://github.com/bob/example -dataStores: - serving: - id: REDIS1 - warehouse: - id: BIGQUERY1 \ No newline at end of file diff --git a/sdk/python/tests/sample/valid_feature_group.yaml b/sdk/python/tests/sample/valid_feature_group.yaml deleted file mode 100644 index f877d209fec..00000000000 --- a/sdk/python/tests/sample/valid_feature_group.yaml +++ /dev/null @@ -1,7 +0,0 @@ -id: my_fg -tags: ["tag1", "tag2"] -dataStores: - serving: - id: "REDIS1" - warehouse: - id: "BIGQUERY1" \ No newline at end of file diff --git a/sdk/python/tests/sample/valid_storage.yaml b/sdk/python/tests/sample/valid_storage.yaml deleted file mode 100644 index 96ae9e40f78..00000000000 --- a/sdk/python/tests/sample/valid_storage.yaml +++ /dev/null @@ -1,6 +0,0 @@ -id: BIGQUERY1 -type: bigquery -options: - dataset: "feast" - project: "gcp-project" - tempLocation: "gs://feast-storage" \ No newline at end of file diff --git a/sdk/python/tests/sdk/resources/test_entity.py b/sdk/python/tests/sdk/resources/test_entity.py deleted file mode 100644 index 65a10caa1da..00000000000 --- a/sdk/python/tests/sdk/resources/test_entity.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from feast.sdk.resources.entity import Entity -import os - - -class TestEntity(object): - def test_read_from_yaml(self): - entity_no_tag = Entity.from_yaml( - "tests/sample/valid_entity_no_tag.yaml") - assert entity_no_tag.name == "myentity" - assert entity_no_tag.description == "test entity without tag" - assert len(entity_no_tag.tags) == 0 - - entity = Entity.from_yaml("tests/sample/valid_entity.yaml") - assert entity.name == "myentity" - assert entity.description == "test entity with tag" - assert entity.tags[0] == "tag1" - assert entity.tags[1] == "tag2" - - def test_dump(self): - entity = Entity("entity", "description", ["tag1", "tag2"]) - entity.dump("myentity.yaml") - actual = Entity.from_yaml("myentity.yaml") - assert actual.name == entity.name - assert actual.description == entity.description - for t1, t2 in zip(actual.tags, entity.tags): - assert t1 == t2 - - #cleanup - os.remove("myentity.yaml") diff --git a/sdk/python/tests/sdk/resources/test_feature.py b/sdk/python/tests/sdk/resources/test_feature.py deleted file mode 100644 index 7f296db2357..00000000000 --- a/sdk/python/tests/sdk/resources/test_feature.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from feast.sdk.resources.feature import Feature, Datastore, ValueType - - -class TestFeature(object): - def dummy_feature(self): - warehouse_data_store = Datastore(id="BIGQUERY1", options={}) - serving_data_store = Datastore(id="REDIS1", options={}) - my_feature = Feature( - name="my_feature", - entity="my_entity", - value_type=ValueType.BYTES, - owner="feast@web.com", - description="test feature", - uri="github.com/feature_repo", - warehouse_store=warehouse_data_store, - serving_store=serving_data_store) - return my_feature - - def test_set_name(self): - my_feature = self.dummy_feature() - new_name = "my_feature_new" - my_feature.name = new_name - assert my_feature.name == new_name - assert my_feature.id == "my_entity.my_feature_new" - - def test_set_entity(self): - my_feature = self.dummy_feature() - new_entity = "new_entity" - my_feature.entity = new_entity - assert my_feature.entity == new_entity - assert my_feature.id == "new_entity.my_feature" - - def test_read_from_yaml(self): - feature = Feature.from_yaml("tests/sample/valid_feature.yaml") - assert feature.id == "myentity.feature_bool_redis1" - assert feature.name == "feature_bool_redis1" - assert feature.entity == "myentity" - assert feature.owner == "bob@example.com" - assert feature.description == "test entity." - assert feature.value_type == ValueType.BOOL - assert feature.uri == "https://github.com/bob/example" - assert feature.serving_store.id == "REDIS1" - assert feature.warehouse_store.id == "BIGQUERY1" diff --git a/sdk/python/tests/sdk/resources/test_feature_group.py b/sdk/python/tests/sdk/resources/test_feature_group.py deleted file mode 100644 index 244211d1924..00000000000 --- a/sdk/python/tests/sdk/resources/test_feature_group.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from feast.sdk.resources.feature_group import FeatureGroup - - -class TestFeatureGroupSpec(object): - def test_read_from_yaml(self): - feature_group = FeatureGroup.from_yaml( - "tests/sample/valid_feature_group.yaml") - assert feature_group.id == "my_fg" - assert feature_group.serving_store.id == "REDIS1" - assert feature_group.warehouse_store.id == "BIGQUERY1" - assert feature_group.tags == ["tag1", "tag2"] diff --git a/sdk/python/tests/sdk/resources/test_feature_set.py b/sdk/python/tests/sdk/resources/test_feature_set.py deleted file mode 100644 index 5972ecd0d14..00000000000 --- a/sdk/python/tests/sdk/resources/test_feature_set.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -import pytest -from feast.sdk.resources.feature_set import FeatureSet, DatasetInfo - - -class TestFeatureSet(object): - def test_features(self): - entity_name = "driver" - features = ["driver.feature1", "driver.feature2"] - - feature_set = FeatureSet(entity_name, features) - assert len(feature_set.features) == 2 - - assert len(set(feature_set.features) & set(features)) == 2 - - def test_different_entity(self): - entity_name = "driver" - features = ["customer.feature1", "driver.feature1"] - with pytest.raises( - ValueError, - match="feature set has different entity: customer"): - FeatureSet(entity_name, features) - - -class TestDatasetInfo(object): - def test_creation(self): - name = "dataset_name" - full_table_id = "gcp-project.dataset.table_name" - dataset = DatasetInfo(name, full_table_id) - assert dataset.name == name - assert dataset.full_table_id == full_table_id diff --git a/sdk/python/tests/sdk/resources/test_storage.py b/sdk/python/tests/sdk/resources/test_storage.py deleted file mode 100644 index 75b9d1a8f8a..00000000000 --- a/sdk/python/tests/sdk/resources/test_storage.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from feast.sdk.resources.storage import Storage -import os - - -class TestStorage(object): - def test_update_options(self): - storage = Storage(id="storage1", type="redis") - assert storage.options == {} - myDict = {'key': 'value'} - storage.options = myDict - assert storage.options == myDict - - def test_from_yaml(self): - storage = Storage.from_yaml("tests/sample/valid_storage.yaml") - assert storage.id == "BIGQUERY1" - assert storage.type == "bigquery" - expDict = { - "dataset": "feast", - "project": "gcp-project", - "tempLocation": "gs://feast-storage" - } - assert storage.options == expDict - - def test_dump(self): - opt = {"option1": "value1", "option2": "value2"} - storage = Storage("storage1", "redis", opt) - - storage.dump("storage.yaml") - storage2 = Storage.from_yaml("storage.yaml") - - assert storage.id == storage2.id - assert storage.type == storage2.type - assert storage.options == storage2.options - assert storage2.options == opt - - #cleanup - os.remove("storage.yaml") diff --git a/sdk/python/tests/sdk/test_client.py b/sdk/python/tests/sdk/test_client.py deleted file mode 100644 index 9e76cde301d..00000000000 --- a/sdk/python/tests/sdk/test_client.py +++ /dev/null @@ -1,507 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -from datetime import datetime - -import grpc -import numpy as np -import pandas as pd -import pytest -from google.protobuf.timestamp_pb2 import Timestamp -from pandas.util.testing import assert_frame_equal - -import feast.core.CoreService_pb2_grpc as core -import feast.core.DatasetService_pb2_grpc as training -import feast.core.JobService_pb2_grpc as jobs -import feast.serving.Serving_pb2 as serving_pb -from feast.core.CoreService_pb2 import CoreServiceTypes -from feast.core.DatasetService_pb2 import DatasetInfo as DatasetInfo_pb -from feast.core.DatasetService_pb2 import DatasetServiceTypes -from feast.core.JobService_pb2 import JobServiceTypes -from feast.sdk.client import Client, _parse_date, _timestamp_from_datetime -from feast.sdk.importer import Importer -from feast.sdk.resources.entity import Entity -from feast.sdk.resources.feature import Feature -from feast.sdk.resources.feature_group import FeatureGroup -from feast.sdk.resources.feature_set import FeatureSet, DatasetInfo, FileType -from feast.sdk.resources.storage import Storage -from feast.sdk.utils.bq_util import TableDownloader -from feast.serving.Serving_pb2 import QueryFeaturesRequest, \ - QueryFeaturesResponse, FeatureValue -from feast.specs.FeatureSpec_pb2 import FeatureSpec, DataStores, DataStore -from feast.specs.ImportSpec_pb2 import ImportSpec -from feast.specs.StorageSpec_pb2 import StorageSpec -from feast.types.Value_pb2 import Value - - -class TestClient(object): - @pytest.fixture - def client(self, mocker): - cli = Client(core_url="some.uri", serving_url="some.serving.uri") - mocker.patch.object(cli, '_connect_core') - mocker.patch.object(cli, '_connect_serving') - return cli - - def test_apply_single_feature(self, client, mocker): - my_feature = Feature(name="test", entity="test") - grpc_stub = core.CoreServiceStub(grpc.insecure_channel("")) - - with mocker.patch.object( - grpc_stub, - 'ApplyFeature', - return_value=CoreServiceTypes.ApplyFeatureResponse( - featureId="test.test")): - client._core_service_stub = grpc_stub - id = client.apply(my_feature) - assert id == "test.test" - - def test_apply_single_entity(self, client, mocker): - my_entity = Entity(name="test") - grpc_stub = core.CoreServiceStub(grpc.insecure_channel("")) - - with mocker.patch.object( - grpc_stub, - 'ApplyEntity', - return_value=CoreServiceTypes.ApplyEntityResponse( - entityName="test")): - client._core_service_stub = grpc_stub - name = client.apply(my_entity) - assert name == "test" - - def test_apply_single_feature_group(self, client, mocker): - my_feature_group = FeatureGroup(id="test") - grpc_stub = core.CoreServiceStub(grpc.insecure_channel("")) - - with mocker.patch.object( - grpc_stub, - 'ApplyFeatureGroup', - return_value=CoreServiceTypes.ApplyFeatureGroupResponse( - featureGroupId="test")): - client._core_service_stub = grpc_stub - name = client.apply(my_feature_group) - assert name == "test" - - def test_apply_single_storage(self, client, mocker): - my_storage = Storage(id="TEST", type="redis") - grpc_stub = core.CoreServiceStub(grpc.insecure_channel("")) - - with mocker.patch.object( - grpc_stub, - 'ApplyStorage', - return_value=CoreServiceTypes.ApplyStorageResponse( - storageId="TEST")): - client._core_service_stub = grpc_stub - name = client.apply(my_storage) - assert name == "TEST" - - def test_apply_unsupported_object(self, client): - with pytest.raises(TypeError) as e_info: - client.apply(None) - assert e_info.__str__() 
== "Apply can only be passed one of the" \ - + "following types: [Feature, Entity, FeatureGroup, Storage, Importer]" - - def test_apply_multiple(self, client, mocker): - my_storage = Storage(id="TEST", type="redis") - my_feature_group = FeatureGroup(id="test") - my_entity = Entity(name="test") - - grpc_stub = core.CoreServiceStub(grpc.insecure_channel("")) - - mocker.patch.object( - grpc_stub, - 'ApplyStorage', - return_value=CoreServiceTypes.ApplyStorageResponse( - storageId="TEST")) - mocker.patch.object( - grpc_stub, - 'ApplyFeatureGroup', - return_value=CoreServiceTypes.ApplyFeatureGroupResponse( - featureGroupId="test")) - mocker.patch.object( - grpc_stub, - 'ApplyEntity', - return_value=CoreServiceTypes.ApplyEntityResponse( - entityName="test")) - - client._core_service_stub = grpc_stub - ids = client.apply([my_storage, my_entity, my_feature_group]) - assert ids == ["TEST", "test", "test"] - - def test_run_job_no_staging(self, client, mocker): - grpc_stub = jobs.JobServiceStub(grpc.insecure_channel("")) - - mocker.patch.object( - grpc_stub, - 'SubmitJob', - return_value=JobServiceTypes.SubmitImportJobResponse( - jobId="myjob12312")) - client._job_service_stub = grpc_stub - importer = Importer({"import": ImportSpec()}, None, - {"require_staging": False}) - - job_id = client.run(importer) - assert job_id == "myjob12312" - - def test_create_dataset_invalid_args(self, client): - feature_set = FeatureSet("entity", ["entity.feature1"]) - # empty feature set - with pytest.raises(ValueError, match="feature set is empty"): - inv_feature_set = FeatureSet("entity", []) - client.create_dataset(inv_feature_set, "2018-12-01", "2018-12-02") - # invalid start date - with pytest.raises( - ValueError, - match="Incorrect date format, should be YYYY-MM-DD"): - client.create_dataset(feature_set, "20181201", "2018-12-02") - # invalid end date - with pytest.raises( - ValueError, - match="Incorrect date format, should be YYYY-MM-DD"): - client.create_dataset(feature_set, "2018-12-01", "20181202") - # start date > end date - with pytest.raises(ValueError, match="end_date is before start_date"): - client.create_dataset(feature_set, "2018-12-02", "2018-12-01") - # invalid limit - with pytest.raises( - ValueError, match="limit is not a positive integer"): - client.create_dataset(feature_set, "2018-12-01", "2018-12-02", -1) - - with pytest.raises(ValueError, match="filters is not dictionary"): - client.create_dataset(feature_set, "2018-12-01", "2018-12-02", - 10, filters="filter") - - def test_create_dataset(self, client, mocker): - entity_name = "myentity" - feature_ids = ["myentity.feature1", "myentity.feature2"] - fs = FeatureSet(entity_name, feature_ids) - start_date = "2018-01-02" - end_date = "2018-12-31" - - ds_pb = DatasetInfo_pb( - name="dataset_name", tableUrl="project.dataset.table") - - mock_trn_stub = training.DatasetServiceStub(grpc.insecure_channel("")) - mocker.patch.object( - mock_trn_stub, - "CreateDataset", - return_value=DatasetServiceTypes.CreateDatasetResponse( - datasetInfo=ds_pb)) - client._dataset_service_stub = mock_trn_stub - - ds = client.create_dataset(fs, start_date, end_date) - - assert "dataset_name" == ds.name - assert "project.dataset.table" == ds.full_table_id - mock_trn_stub.CreateDataset.assert_called_once_with( - DatasetServiceTypes.CreateDatasetRequest( - featureSet=fs.proto, - startDate=_timestamp_from_datetime(_parse_date(start_date)), - endDate=_timestamp_from_datetime(_parse_date(end_date)), - limit=None, - namePrefix=None)) - - def 
test_create_dataset_with_filters(self, client, mocker): - entity_name = "myentity" - feature_ids = ["myentity.feature1", "myentity.feature2"] - fs = FeatureSet(entity_name, feature_ids) - start_date = "2018-01-02" - end_date = "2018-12-31" - - ds_pb = DatasetInfo_pb( - name="dataset_name", tableUrl="project.dataset.table") - - mock_trn_stub = training.DatasetServiceStub(grpc.insecure_channel("")) - mocker.patch.object( - mock_trn_stub, - "CreateDataset", - return_value=DatasetServiceTypes.CreateDatasetResponse( - datasetInfo=ds_pb)) - client._dataset_service_stub = mock_trn_stub - - job_filter = {"job_id": 12345} - ds = client.create_dataset(fs, start_date, end_date, - filters=job_filter) - - assert "dataset_name" == ds.name - assert "project.dataset.table" == ds.full_table_id - mock_trn_stub.CreateDataset.assert_called_once_with( - DatasetServiceTypes.CreateDatasetRequest( - featureSet=fs.proto, - startDate=_timestamp_from_datetime(_parse_date(start_date)), - endDate=_timestamp_from_datetime(_parse_date(end_date)), - limit=None, - namePrefix=None, - filters={"job_id": "12345"})) - - - def test_create_dataset_with_limit(self, client, mocker): - entity_name = "myentity" - feature_ids = ["myentity.feature1", "myentity.feature2"] - fs = FeatureSet(entity_name, feature_ids) - start_date = "2018-01-02" - end_date = "2018-12-31" - limit = 100 - - ds_pb = DatasetInfo_pb( - name="dataset_name", tableUrl="project.dataset.table") - - mock_trn_stub = training.DatasetServiceStub(grpc.insecure_channel("")) - mocker.patch.object( - mock_trn_stub, - "CreateDataset", - return_value=DatasetServiceTypes.CreateDatasetResponse( - datasetInfo=ds_pb)) - client._dataset_service_stub = mock_trn_stub - - ds = client.create_dataset(fs, start_date, end_date, limit=limit) - - assert "dataset_name" == ds.name - assert "project.dataset.table" == ds.full_table_id - mock_trn_stub.CreateDataset.assert_called_once_with( - DatasetServiceTypes.CreateDatasetRequest( - featureSet=fs.proto, - startDate=_timestamp_from_datetime(_parse_date(start_date)), - endDate=_timestamp_from_datetime(_parse_date(end_date)), - limit=limit, - namePrefix=None)) - - def test_create_dataset_with_name_prefix(self, client, mocker): - entity_name = "myentity" - feature_ids = ["myentity.feature1", "myentity.feature2"] - fs = FeatureSet(entity_name, feature_ids) - start_date = "2018-01-02" - end_date = "2018-12-31" - limit = 100 - name_prefix = "feast" - - ds_pb = DatasetInfo_pb( - name="dataset_name", tableUrl="project.dataset.table") - - mock_dssvc_stub = training.DatasetServiceStub( - grpc.insecure_channel("")) - mocker.patch.object( - mock_dssvc_stub, - "CreateDataset", - return_value=DatasetServiceTypes.CreateDatasetResponse( - datasetInfo=ds_pb)) - client._dataset_service_stub = mock_dssvc_stub - - ds = client.create_dataset( - fs, start_date, end_date, limit=limit, name_prefix=name_prefix) - - assert "dataset_name" == ds.name - assert "project.dataset.table" == ds.full_table_id - mock_dssvc_stub.CreateDataset.assert_called_once_with( - DatasetServiceTypes.CreateDatasetRequest( - featureSet=fs.proto, - startDate=_timestamp_from_datetime(_parse_date(start_date)), - endDate=_timestamp_from_datetime(_parse_date(end_date)), - limit=limit, - namePrefix=name_prefix)) - - def test_build_serving_request(self, client): - feature_set = FeatureSet("entity", ["entity.feat1", "entity.feat2"]) - - req = client._build_serving_request(feature_set, ["1", "2", "3"]) - expected = QueryFeaturesRequest( - entityName="entity", - entityId=["1", "2", "3"], - 
featureId=feature_set.features) - assert req == expected - - def test_serving_response_to_df(self, client): - response = self._create_query_features_response( - entity_name="entity", - entities={ - "1": { - "entity.feat1": (1, Timestamp(seconds=1)), - "entity.feat2": (2, Timestamp(seconds=2)) - }, - "2": { - "entity.feat1": (3, Timestamp(seconds=3)), - "entity.feat2": (4, Timestamp(seconds=4)) - } - }) - expected_df = pd.DataFrame({'entity': ["1", "2"], - 'entity.feat1': [1, 3], - 'entity.feat2': [2, 4]}) \ - .reset_index(drop=True) - df = client._response_to_df(FeatureSet("entity", ["entity.feat1", - "entity.feat2"]), - response) \ - .sort_values(['entity']) \ - .reset_index(drop=True)[expected_df.columns] - assert_frame_equal( - df, - expected_df, - check_dtype=False, - check_column_type=False, - check_index_type=False) - - def test_serving_response_to_df_with_missing_value(self, client): - response = self._create_query_features_response( - entity_name="entity", - entities={ - "1": { - "entity.feat1": (1, Timestamp(seconds=1)) - }, - "2": { - "entity.feat1": (3, Timestamp(seconds=3)), - "entity.feat2": (4, Timestamp(seconds=4)) - } - }) - expected_df = pd.DataFrame({'entity': ["1", "2"], - 'entity.feat1': [1, 3], - 'entity.feat2': [np.nan, 4]}) \ - .reset_index(drop=True) - df = client._response_to_df(FeatureSet("entity", ["entity.feat1", - "entity.feat2"]), - response) \ - .sort_values(['entity']) \ - .reset_index(drop=True)[expected_df.columns] - assert_frame_equal( - df, - expected_df, - check_dtype=False, - check_column_type=False, - check_index_type=False) - - def test_serving_response_to_df_with_missing_feature(self, client): - response = self._create_query_features_response( - entity_name="entity", - entities={ - "1": { - "entity.feat1": (1, Timestamp(seconds=1)) - }, - "2": { - "entity.feat1": (3, Timestamp(seconds=3)) - } - }) - expected_df = pd.DataFrame({'entity': ["1", "2"], - 'entity.feat1': [1, 3], - 'entity.feat2': [np.NaN, np.NaN]}) \ - .reset_index(drop=True) - df = client._response_to_df(FeatureSet("entity", ["entity.feat1", - "entity.feat2"]), - response) \ - .sort_values(['entity']) \ - .reset_index(drop=True)[expected_df.columns] - assert_frame_equal( - df, - expected_df, - check_dtype=False, - check_column_type=False, - check_index_type=False) - - def test_serving_response_to_df_no_data(self, client): - response = QueryFeaturesResponse(entityName="entity") - expected_df = pd.DataFrame( - columns=['entity', 'entity.feat1', 'entity.feat2']) - df = client._response_to_df( - FeatureSet("entity", ["entity.feat1", "entity.feat2"]), response) - assert_frame_equal( - df, - expected_df, - check_dtype=False, - check_column_type=False, - check_index_type=False) - - def test_serving_response_to_df_with_time_filter(self, client): - response = self._create_query_features_response( - entity_name="entity", - entities={ - "1": { - "entity.feat1": (1, Timestamp(seconds=1)) - }, - "2": { - "entity.feat1": (3, Timestamp(seconds=3)) - } - }) - expected_df = pd.DataFrame({'entity': ["1", "2"], - 'entity.feat1': [np.NaN, 3], - 'entity.feat2': [np.NaN, np.NaN]}) \ - .reset_index(drop=True) - start = datetime.utcfromtimestamp(2) - end = datetime.utcfromtimestamp(5) - df = client._response_to_df(FeatureSet("entity", ["entity.feat1", - "entity.feat2"]), - response, start, end) \ - .sort_values(['entity']) \ - .reset_index(drop=True)[expected_df.columns] - assert_frame_equal( - df, - expected_df, - check_dtype=False, - check_column_type=False, - check_index_type=False) - - def 
test_serving_invalid_type(self, client): - start = "2018-01-01T01:01:01" - end = "2018-01-01T01:01:01" - ts_range = [start, end] - with pytest.raises( - TypeError, match="start and end must be datetime " - "type"): - client.get_serving_data( - FeatureSet("entity", ["entity.feat1", "entity.feat2"]), - ["1234", "5678"], ts_range) - - def test_download_dataset_as_file(self, client, mocker): - destination = "/tmp/dest_file" - - table_dlder = TableDownloader() - mocker.patch.object( - table_dlder, "download_table_as_file", return_value=destination) - - client._table_downloader = table_dlder - full_table_id = "project.dataset.table" - staging_location = "gs://gcs_bucket/" - dataset = DatasetInfo("mydataset", full_table_id) - - result = client.download_dataset( - dataset, - destination, - staging_location=staging_location, - file_type=FileType.CSV) - - assert result == destination - table_dlder.download_table_as_file.assert_called_once_with( - full_table_id, destination, staging_location, FileType.CSV) - - def _create_query_features_response(self, entity_name, entities): - response = QueryFeaturesResponse(entityName=entity_name) - for entity_id, feature_map in entities.items(): - feature = {} - for feature_id, feature_value in feature_map.items(): - feature[feature_id] = FeatureValue( - value=Value(int32Val=feature_value[0]), - timestamp=feature_value[1]) - entity_pb = serving_pb.Entity(features=feature) - response.entities[entity_id].CopyFrom(entity_pb) - - return response - - def _create_feature_spec(self, feature_id, wh_id): - wh_store = DataStore(id=wh_id) - datastores = DataStores(warehouse=wh_store) - return FeatureSpec(id=feature_id, dataStores=datastores) - - def _create_bq_spec(self, id, project, dataset): - return StorageSpec( - id=id, - type="bigquery", - options={ - "project": project, - "dataset": dataset - }) diff --git a/sdk/python/tests/sdk/test_importer.py b/sdk/python/tests/sdk/test_importer.py deleted file mode 100644 index 6a0c9b07848..00000000000 --- a/sdk/python/tests/sdk/test_importer.py +++ /dev/null @@ -1,283 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import pandas as pd -import pytest -import ntpath -from feast.sdk.resources.feature import Feature, ValueType, \ - Datastore -from feast.sdk.importer import _create_feature, Importer -from feast.sdk.utils.gs_utils import is_gs_path - - -class TestImporter(object): - def test_from_csv(self): - csv_path = "tests/data/driver_features.csv" - entity_name = "driver" - owner = "owner@feast.com" - staging_location = "gs://test-bucket" - id_column = "driver_id" - feature_columns = [ - "avg_distance_completed", "avg_customer_distance_completed" - ] - timestamp_column = "ts" - - importer = Importer.from_csv( - path=csv_path, - entity=entity_name, - owner=owner, - staging_location=staging_location, - id_column=id_column, - feature_columns=feature_columns, - timestamp_column=timestamp_column) - - self._validate_csv_importer(importer, csv_path, entity_name, - owner, staging_location, id_column, - feature_columns, timestamp_column) - - def test_from_csv_id_column_not_specified(self): - with pytest.raises( - ValueError, match="Column with name driver is not found"): - feature_columns = [ - "avg_distance_completed", "avg_customer_distance_completed" - ] - csv_path = "tests/data/driver_features.csv" - Importer.from_csv( - path=csv_path, - entity="driver", - owner="owner@feast.com", - staging_location="gs://test-bucket", - feature_columns=feature_columns, - timestamp_column="ts") - - def test_from_csv_timestamp_column_not_specified(self): - feature_columns = [ - "avg_distance_completed", "avg_customer_distance_completed", - "avg_distance_cancelled" - ] - csv_path = "tests/data/driver_features.csv" - entity_name = "driver" - owner = "owner@feast.com" - staging_location = "gs://test-bucket" - id_column = "driver_id" - importer = Importer.from_csv( - path=csv_path, - entity=entity_name, - owner=owner, - staging_location=staging_location, - id_column=id_column, - feature_columns=feature_columns) - - self._validate_csv_importer( - importer, - csv_path, - entity_name, - owner, - staging_location=staging_location, - id_column=id_column, - feature_columns=feature_columns) - - def test_from_csv_feature_columns_not_specified(self): - csv_path = "tests/data/driver_features.csv" - entity_name = "driver" - owner = "owner@feast.com" - staging_location = "gs://test-bucket" - id_column = "driver_id" - timestamp_column = "ts" - importer = Importer.from_csv( - path=csv_path, - entity=entity_name, - owner=owner, - staging_location=staging_location, - id_column=id_column, - timestamp_column=timestamp_column) - - self._validate_csv_importer( - importer, - csv_path, - entity_name, - owner, - staging_location=staging_location, - id_column=id_column, - timestamp_column=timestamp_column) - - def test_from_csv_staging_location_not_specified(self): - with pytest.raises( - ValueError, - match= - "Specify staging_location for importing local file/dataframe"): - feature_columns = [ - "avg_distance_completed", "avg_customer_distance_completed" - ] - csv_path = "tests/data/driver_features.csv" - Importer.from_csv( - path=csv_path, - entity="driver", - owner="owner@feast.com", - feature_columns=feature_columns, - timestamp_column="ts") - - with pytest.raises( - ValueError, match="Staging location must be in GCS") as e_info: - feature_columns = [ - "avg_distance_completed", "avg_customer_distance_completed" - ] - csv_path = "tests/data/driver_features.csv" - Importer.from_csv( - path=csv_path, - entity="driver", - owner="owner@feast.com", - staging_location="/home", - feature_columns=feature_columns, - timestamp_column="ts") - - def 
test_from_df(self): - csv_path = "tests/data/driver_features.csv" - df = pd.read_csv(csv_path) - staging_location = "gs://test-bucket" - entity = "driver" - - importer = Importer.from_df( - df=df, - entity=entity, - owner="owner@feast.com", - staging_location=staging_location, - id_column="driver_id", - timestamp_column="ts") - - assert importer.require_staging == True - assert ("{}/tmp_{}".format(staging_location, - entity) in importer.remote_path) - for feature in importer.features.values(): - assert feature.name in df.columns - assert feature.id == "driver." + feature.name - - import_spec = importer.spec - assert import_spec.type == "file.csv" - assert import_spec.sourceOptions == { - "path": importer.remote_path - } - assert import_spec.entities == ["driver"] - - schema = import_spec.schema - assert schema.entityIdColumn == "driver_id" - assert schema.timestampValue is not None - feature_columns = [ - "completed", "avg_distance_completed", - "avg_customer_distance_completed", "avg_distance_cancelled" - ] - for col, field in zip(df.columns.values, schema.fields): - assert col == field.name - if col in feature_columns: - assert field.featureId == "driver." + col - - def test_stage_df_without_timestamp(self, mocker): - mocker.patch("feast.sdk.importer.df_to_gcs", return_value=True) - feature_columns = [ - "avg_distance_completed", "avg_customer_distance_completed", - "avg_distance_cancelled" - ] - csv_path = "tests/data/driver_features.csv" - entity_name = "driver" - owner = "owner@feast.com" - staging_location = "gs://test-bucket" - id_column = "driver_id" - importer = Importer.from_csv( - path=csv_path, - entity=entity_name, - owner=owner, - staging_location=staging_location, - id_column=id_column, - feature_columns=feature_columns) - - importer.stage() - - def _validate_csv_importer(self, - importer, - csv_path, - entity_name, - owner, - staging_location=None, - id_column=None, - feature_columns=None, - timestamp_column=None, - timestamp_value=None): - df = pd.read_csv(csv_path) - assert not importer.require_staging == is_gs_path(csv_path) - if importer.require_staging: - assert importer.remote_path == "{}/{}".format( - staging_location, ntpath.basename(csv_path)) - - # check features created - for feature in importer.features.values(): - assert feature.name in df.columns - assert feature.id == "{}.{}".format(entity_name, feature.name) - - import_spec = importer.spec - assert import_spec.type == "file.csv" - path = importer.remote_path if importer.require_staging else csv_path - assert import_spec.sourceOptions == {"path": path} - assert import_spec.entities == [entity_name] - - schema = import_spec.schema - assert schema.entityIdColumn == id_column if id_column is not None else entity_name - if timestamp_column is not None: - assert schema.timestampColumn == timestamp_column - elif timestamp_value is not None: - assert schema.timestampValue == timestamp_value - - if feature_columns is None: - feature_columns = list(df.columns.values) - feature_columns.remove(id_column) - feature_columns.remove(timestamp_column) - - # check schema's field - for col, field in zip(df.columns.values, schema.fields): - assert col == field.name - if col in feature_columns: - assert field.featureId == '{}.{}'.format(entity_name, - col).lower() - - - -class TestHelpers: - def test_create_feature(self): - col = pd.Series([1] * 3, dtype='int32', name="test") - expected = Feature( - name="test", - entity="test", - owner="person", - value_type=ValueType.INT32) - actual = _create_feature(col, "test", "person", 
None, None) - assert actual.id == expected.id - assert actual.value_type == expected.value_type - assert actual.owner == expected.owner - - def test_create_feature_with_stores(self): - col = pd.Series([1] * 3, dtype='int32', name="test") - expected = Feature( - name="test", - entity="test", - owner="person", - value_type=ValueType.INT32, - serving_store=Datastore(id="SERVING"), - warehouse_store=Datastore(id="WAREHOUSE")) - actual = _create_feature(col, "test", "person", - Datastore(id="SERVING"), - Datastore(id="WAREHOUSE")) - assert actual.id == expected.id - assert actual.value_type == expected.value_type - assert actual.owner == expected.owner - assert actual.serving_store == expected.serving_store - assert actual.warehouse_store == expected.warehouse_store diff --git a/sdk/python/tests/sdk/utils/test_bq_utils.py b/sdk/python/tests/sdk/utils/test_bq_utils.py deleted file mode 100644 index 3fde64aee5a..00000000000 --- a/sdk/python/tests/sdk/utils/test_bq_utils.py +++ /dev/null @@ -1,189 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os - -import fastavro -import pandas as pd -import pytest -from google.cloud.bigquery.table import Table -from google.cloud.exceptions import NotFound - -from feast.sdk.resources.feature_set import FileType -from feast.sdk.utils.bq_util import TableDownloader, get_table_name, query_to_dataframe -from feast.specs.StorageSpec_pb2 import StorageSpec - -testdata_path = os.path.abspath(os.path.join(__file__, "..", "..", "..", "data")) - - -def test_get_table_name(): - project_name = "my_project" - dataset_name = "my_dataset" - feature_id = "myentity.feature1" - storage_spec = StorageSpec( - id="BIGQUERY1", - type="bigquery", - options={"project": project_name, "dataset": dataset_name}, - ) - assert ( - get_table_name(feature_id, storage_spec) - == "my_project.my_dataset.myentity" - ) - - -def test_get_table_name_not_bq(): - feature_id = "myentity.feature1" - storage_spec = StorageSpec(id="REDIS1", type="redis") - with pytest.raises(ValueError, match="storage spec is not BigQuery storage spec"): - get_table_name(feature_id, storage_spec) - - -@pytest.mark.skipif( - os.getenv("SKIP_BIGQUERY_TEST") is not None, - reason="SKIP_BIGQUERY_TEST is set in the environment", -) -def test_query_to_dataframe(): - with open( - os.path.join(testdata_path, "austin_bikeshare.bikeshare_stations.avro"), "rb" - ) as expected_file: - avro_reader = fastavro.reader(expected_file) - expected = pd.DataFrame.from_records(avro_reader) - - query = "SELECT * FROM `bigquery-public-data.austin_bikeshare.bikeshare_stations`" - actual = query_to_dataframe(query) - assert expected.equals(actual) - - -@pytest.mark.skipif( - os.getenv("SKIP_BIGQUERY_TEST") is not None, - reason="SKIP_BIGQUERY_TEST is set in the environment", -) -def test_query_to_dataframe_for_non_existing_dataset(): - query = "SELECT * FROM `bigquery-public-data.this_dataset_should_not_exists.bikeshare_stations`" - with pytest.raises(NotFound): - query_to_dataframe(query) - - 
-class TestTableDownloader(object): - def test_download_table_as_df(self, mocker): - self._stop_time(mocker) - mocked_gcs_to_df = mocker.patch( - "feast.sdk.utils.bq_util.gcs_to_df", return_value=None - ) - - staging_path = "gs://temp/" - staging_file_name = "temp_0" - full_table_id = "project_id.dataset_id.table_id" - - table_dldr = TableDownloader() - exp_staging_path = os.path.join(staging_path, staging_file_name) - - table_dldr._bq = _Mock_BQ_Client() - mocker.patch.object(table_dldr._bq, "extract_table", return_value=_Job()) - - table_dldr.download_table_as_df(full_table_id, staging_location=staging_path) - - assert len(table_dldr._bq.extract_table.call_args_list) == 1 - args, kwargs = table_dldr._bq.extract_table.call_args_list[0] - assert args[0].full_table_id == Table.from_string(full_table_id).full_table_id - assert args[1] == exp_staging_path - assert kwargs["job_config"].destination_format == "CSV" - mocked_gcs_to_df.assert_called_once_with(exp_staging_path) - - def test_download_csv(self, mocker): - self._stop_time(mocker) - self._test_download_file(mocker, FileType.CSV) - - def test_download_avro(self, mocker): - self._stop_time(mocker) - self._test_download_file(mocker, FileType.AVRO) - - def test_download_json(self, mocker): - self._stop_time(mocker) - self._test_download_file(mocker, FileType.JSON) - - def test_download_invalid_staging_url(self): - full_table_id = "project_id.dataset_id.table_id" - table_dldr = TableDownloader() - with pytest.raises( - ValueError, match="staging_uri must be a directory in " "GCS" - ): - table_dldr.download_table_as_file( - full_table_id, "/tmp/dst", "/local/directory", FileType.CSV - ) - - with pytest.raises( - ValueError, match="staging_uri must be a directory in " "GCS" - ): - table_dldr.download_table_as_df(full_table_id, "/local/directory") - - def _test_download_file(self, mocker, type): - staging_path = "gs://temp/" - staging_file_name = "temp_0" - dst_path = "/tmp/myfile.csv" - full_table_id = "project_id.dataset_id.table_id" - - table_dldr = TableDownloader() - mock_blob = _Blob() - mocker.patch.object(mock_blob, "download_to_filename") - table_dldr._bq = _Mock_BQ_Client() - mocker.patch.object(table_dldr._bq, "extract_table", return_value=_Job()) - table_dldr._gcs = _Mock_GCS_Client() - mocker.patch.object( - table_dldr._gcs, "get_bucket", return_value=_Bucket(mock_blob) - ) - - table_dldr.download_table_as_file( - full_table_id, dst_path, staging_location=staging_path, file_type=type - ) - - exp_staging_path = os.path.join(staging_path, staging_file_name) - assert len(table_dldr._bq.extract_table.call_args_list) == 1 - args, kwargs = table_dldr._bq.extract_table.call_args_list[0] - assert args[0].full_table_id == Table.from_string(full_table_id).full_table_id - assert args[1] == exp_staging_path - assert kwargs["job_config"].destination_format == str(type) - - mock_blob.download_to_filename.assert_called_once_with(dst_path) - - def _stop_time(self, mocker): - mocker.patch("time.time", return_value=0) - - -class _Mock_BQ_Client: - def extract_table(self): - pass - - -class _Mock_GCS_Client: - def get_bucket(self): - pass - - -class _Job: - def result(self): - return None - - -class _Bucket: - def __init__(self, blob): - self._blob = blob - - def blob(self, name): - return self._blob - - -class _Blob: - def download_to_filename(self, filename): - pass diff --git a/sdk/python/tests/sdk/utils/test_types.py b/sdk/python/tests/sdk/utils/test_types.py deleted file mode 100644 index 021e51e9eb8..00000000000 --- 
a/sdk/python/tests/sdk/utils/test_types.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import pytest -import pandas as pd -import numpy as np - -from feast.sdk.utils.types import dtype_to_value_type, ValueType - - -def test_convert_dtype_to_value_type(): - dft = pd.DataFrame( - dict( - A=np.random.rand(3), - B=1, - C='foo', - D=pd.Timestamp('20010102'), - E=pd.Series([1.0] * 3).astype('float32'), - F=False, - G=pd.Series([1] * 3, dtype='int8'))) - - assert dtype_to_value_type(dft['A'].dtype) == ValueType.DOUBLE - assert dtype_to_value_type(dft['B'].dtype) == ValueType.INT64 - assert dtype_to_value_type(dft['C'].dtype) == ValueType.STRING - assert dtype_to_value_type(dft['D'].dtype) == ValueType.TIMESTAMP - assert dtype_to_value_type(dft['E'].dtype) == ValueType.FLOAT - assert dtype_to_value_type(dft['F'].dtype) == ValueType.BOOL - assert dtype_to_value_type(dft['G'].dtype) == ValueType.INT32 diff --git a/sdk/python/tests/stores.py b/sdk/python/tests/stores.py new file mode 100644 index 00000000000..e511cb1db19 --- /dev/null +++ b/sdk/python/tests/stores.py @@ -0,0 +1,98 @@ +from feast.types import FeatureRow_pb2 as FeatureRowProto +from feast.core import FeatureSet_pb2 as FeatureSetProto +import sqlite3 +from typing import Dict, List +from feast.entity import Entity +from feast.value_type import ValueType +from feast.feature_set import FeatureSet, Feature + +from feast.types import ( + FeatureRow_pb2 as FeatureRowProto, + Field_pb2 as FieldProto, + Value_pb2 as ValueProto, +) +from google.protobuf.timestamp_pb2 import Timestamp + + +class Database: + pass + + +class SQLiteDatabase(Database): + def __init__(self): + self._conn = sqlite3.connect(":memory:") + self._c = self._conn.cursor() + + def register_feature_set(self, feature_set: FeatureSetProto.FeatureSetSpec): + query = build_sqlite_create_table_query(feature_set) + print(query) + self._c.execute(query) + self._c.execute("SELECT name FROM sqlite_master WHERE type='table';") + + available_table = self._c.fetchall() + print(available_table) + + def upsert_feature_row( + self, + feature_set: FeatureSetProto.FeatureSetSpec, + feature_row: FeatureRowProto.FeatureRow, + ): + values = (feature_row.event_timestamp,) + for entity in list(feature_set.entities): + values = values + (get_feature_row_value_by_name(feature_row, entity.name),) + values = values + (feature_row.SerializeToString(),) + self._c.execute(build_sqlite_insert_feature_row_query(feature_set), values) + + +def build_sqlite_create_table_query(feature_set: FeatureSetProto.FeatureSetSpec): + query = ( + """ + CREATE TABLE IF NOT EXISTS {} ( + {} + PRIMARY KEY ({}) + ); + """ + ).format( + get_table_name(feature_set), + " ".join([column + " text NOT NULL," for column in get_columns(feature_set)]), + ", ".join( + get_columns(feature_set)[1:] + ), # exclude event_timestamp column for online stores + ) + # Hyphens become three underscores + query = query.replace("-", "___") + return query + + +def 
build_sqlite_insert_feature_row_query(feature_set: FeatureSetProto.FeatureSetSpec): + return """ + INSERT OR REPLACE INTO {} ({}) + VALUES(?,?,?,?,?,?) + """.format( + get_table_name(feature_set), ",".join(get_columns(feature_set)) + ) + + +def get_columns(feature_set: FeatureSetProto.FeatureSetSpec) -> List[str]: + return ( + ["event_timestamp"] + + [field.name for field in list(feature_set.entities)] + + ["value"] + ) + + +def get_feature_row_value_by_name(feature_row, name): + values = [field.value for field in list(feature_row.fields) if field.name == name] + if len(values) != 1: + raise Exception( + "Invalid number of features with name {} in feature row {}".format( + name, feature_row.name + ) + ) + return values[0] + + +def get_table_name(feature_set: FeatureSetProto.FeatureSetSpec) -> str: + if not feature_set.name and not feature_set.version: + raise ValueError("Feature set name or version is missing") + return (feature_set.name + "_" + str(feature_set.version)).replace("-", "___") diff --git a/sdk/python/tests/test_client.py b/sdk/python/tests/test_client.py new file mode 100644 index 00000000000..a2457a50927 --- /dev/null +++ b/sdk/python/tests/test_client.py @@ -0,0 +1,454 @@ +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
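+# The client tests below spin up in-process gRPC servers from the local
+# CoreServicer and ServingServicer test helpers, bound to localhost:50051 and
+# localhost:50052, so they assume those ports are free while pytest runs.
+# The `mock_client` fixture takes the opposite approach: it patches out the
+# core/serving connections entirely and stubs individual gRPC methods via
+# pytest-mock, so no server (in-process or otherwise) is needed for it.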
+from datetime import datetime + +import tempfile +import grpc +import pandas as pd +from google.protobuf.duration_pb2 import Duration +from mock import MagicMock +from pytz import timezone +from pandavro import to_avro +import feast.core.CoreService_pb2_grpc as Core +import feast.serving.ServingService_pb2_grpc as Serving +from feast.feature_set import FeatureSet +from feast.entity import Entity +from feast.feature_set import Feature +from feast.source import KafkaSource +from feast.core.FeatureSet_pb2 import FeatureSetSpec, FeatureSpec, EntitySpec +from feast.core.Source_pb2 import SourceType, KafkaSourceConfig, Source +from feast.core.CoreService_pb2 import ( + GetFeastCoreVersionResponse, + ListFeatureSetsResponse, + GetFeatureSetResponse, + GetFeatureSetRequest, +) +from feast.serving.ServingService_pb2 import ( + GetFeastServingInfoResponse, + GetOnlineFeaturesResponse, + GetOnlineFeaturesRequest, + GetBatchFeaturesResponse, + Job as BatchFeaturesJob, + JobType, + JobStatus, + DataFormat, + GetJobResponse, + FeastServingType, +) +import pytest +from feast.client import Client +from concurrent import futures +from feast_core_server import CoreServicer +from feast_serving_server import ServingServicer +from feast.types import Value_pb2 as ValueProto +from feast.value_type import ValueType +from feast.job import Job +import dataframes + +CORE_URL = "core.feast.example.com" +SERVING_URL = "serving.example.com" + + +class TestClient: + @pytest.fixture(scope="function") + def core_server(self): + server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) + Core.add_CoreServiceServicer_to_server(CoreServicer(), server) + server.add_insecure_port("[::]:50051") + server.start() + yield server + server.stop(0) + + @pytest.fixture(scope="function") + def serving_server(self): + server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) + Serving.add_ServingServiceServicer_to_server(ServingServicer(), server) + server.add_insecure_port("[::]:50052") + server.start() + yield server + server.stop(0) + + @pytest.fixture + def mock_client(self, mocker): + client = Client(core_url=CORE_URL, serving_url=SERVING_URL) + mocker.patch.object(client, "_connect_core") + mocker.patch.object(client, "_connect_serving") + return client + + @pytest.fixture + def client(self, core_server, serving_server): + return Client(core_url="localhost:50051", serving_url="localhost:50052") + + def test_version(self, mock_client, mocker): + mock_client._core_service_stub = Core.CoreServiceStub(grpc.insecure_channel("")) + mock_client._serving_service_stub = Serving.ServingServiceStub( + grpc.insecure_channel("") + ) + + mocker.patch.object( + mock_client._core_service_stub, + "GetFeastCoreVersion", + return_value=GetFeastCoreVersionResponse(version="0.3.0"), + ) + + mocker.patch.object( + mock_client._serving_service_stub, + "GetFeastServingInfo", + return_value=GetFeastServingInfoResponse(version="0.3.0"), + ) + + status = mock_client.version() + assert ( + status["core"]["url"] == CORE_URL + and status["core"]["version"] == "0.3.0" + and status["serving"]["url"] == SERVING_URL + and status["serving"]["version"] == "0.3.0" + ) + + def test_get_online_features(self, mock_client, mocker): + ROW_COUNT = 300 + + mock_client._serving_service_stub = Serving.ServingServiceStub( + grpc.insecure_channel("") + ) + + fields = dict() + for feature_num in range(1, 10): + fields["feature_set_1:1:feature_" + str(feature_num)] = ValueProto.Value( + int64_val=feature_num + ) + field_values = 
GetOnlineFeaturesResponse.FieldValues(fields=fields) + + response = GetOnlineFeaturesResponse() + entity_rows = [] + for row_number in range(1, ROW_COUNT + 1): + response.field_values.append(field_values) + entity_rows.append( + GetOnlineFeaturesRequest.EntityRow( + fields={"customer_id": ValueProto.Value(int64_val=row_number)} + ) + ) + + mocker.patch.object( + mock_client._serving_service_stub, + "GetOnlineFeatures", + return_value=response, + ) + + response = mock_client.get_online_features( + entity_rows=entity_rows, + feature_ids=[ + "feature_set_1:1:feature_1", + "feature_set_1:1:feature_2", + "feature_set_1:1:feature_3", + "feature_set_1:1:feature_4", + "feature_set_1:1:feature_5", + "feature_set_1:1:feature_6", + "feature_set_1:1:feature_7", + "feature_set_1:1:feature_8", + "feature_set_1:1:feature_9", + ], + ) # type: GetOnlineFeaturesResponse + + assert ( + response.field_values[0].fields["feature_set_1:1:feature_1"].int64_val == 1 + and response.field_values[0].fields["feature_set_1:1:feature_9"].int64_val + == 9 + ) + + def test_get_feature_set(self, mock_client, mocker): + mock_client._core_service_stub = Core.CoreServiceStub(grpc.insecure_channel("")) + + from google.protobuf.duration_pb2 import Duration + + mocker.patch.object( + mock_client._core_service_stub, + "GetFeatureSet", + return_value=GetFeatureSetResponse( + feature_set=FeatureSetSpec( + name="my_feature_set", + version=2, + max_age=Duration(seconds=3600), + features=[ + FeatureSpec( + name="my_feature_1", value_type=ValueProto.ValueType.FLOAT + ), + FeatureSpec( + name="my_feature_2", value_type=ValueProto.ValueType.FLOAT + ), + ], + entities=[ + EntitySpec( + name="my_entity_1", value_type=ValueProto.ValueType.INT64 + ) + ], + source=Source( + type=SourceType.KAFKA, + kafka_source_config=KafkaSourceConfig( + bootstrap_servers="localhost:9092", topic="topic" + ), + ), + ) + ), + ) + + feature_set = mock_client.get_feature_set("my_feature_set", version=2) + + assert ( + feature_set.name == "my_feature_set" + and feature_set.version == 2 + and feature_set.fields["my_feature_1"].name == "my_feature_1" + and feature_set.fields["my_feature_1"].dtype == ValueType.FLOAT + and feature_set.fields["my_entity_1"].name == "my_entity_1" + and feature_set.fields["my_entity_1"].dtype == ValueType.INT64 + and len(feature_set.features) == 2 + and len(feature_set.entities) == 1 + ) + + def test_get_batch_features(self, mock_client, mocker): + + mock_client._serving_service_stub = Serving.ServingServiceStub( + grpc.insecure_channel("") + ) + mock_client._core_service_stub = Core.CoreServiceStub(grpc.insecure_channel("")) + + mocker.patch.object( + mock_client._core_service_stub, + "GetFeatureSet", + return_value=GetFeatureSetResponse( + feature_set=FeatureSetSpec( + name="customer_fs", + version=1, + entities=[ + EntitySpec( + name="customer", value_type=ValueProto.ValueType.INT64 + ), + EntitySpec( + name="transaction", value_type=ValueProto.ValueType.INT64 + ), + ], + features=[ + FeatureSpec( + name="customer_feature_1", + value_type=ValueProto.ValueType.FLOAT, + ), + FeatureSpec( + name="customer_feature_2", + value_type=ValueProto.ValueType.STRING, + ), + ], + ) + ), + ) + + expected_dataframe = pd.DataFrame( + { + "datetime": [datetime.utcnow() for _ in range(3)], + "customer": [1001, 1002, 1003], + "transaction": [1001, 1002, 1003], + "customer_fs:1:customer_feature_1": [1001, 1002, 1003], + "customer_fs:1:customer_feature_2": [1001, 1002, 1003], + } + ) + + final_results = tempfile.mktemp() + 
to_avro(file_path_or_buffer=final_results, df=expected_dataframe) + + mocker.patch.object( + mock_client._serving_service_stub, + "GetBatchFeatures", + return_value=GetBatchFeaturesResponse( + job=BatchFeaturesJob( + id="123", + type=JobType.JOB_TYPE_DOWNLOAD, + status=JobStatus.JOB_STATUS_DONE, + file_uris=[f"file://{final_results}"], + data_format=DataFormat.DATA_FORMAT_AVRO, + ) + ), + ) + + mocker.patch.object( + mock_client._serving_service_stub, + "GetJob", + return_value=GetJobResponse( + job=BatchFeaturesJob( + id="123", + type=JobType.JOB_TYPE_DOWNLOAD, + status=JobStatus.JOB_STATUS_DONE, + file_uris=[f"file://{final_results}"], + data_format=DataFormat.DATA_FORMAT_AVRO, + ) + ), + ) + + mocker.patch.object( + mock_client._serving_service_stub, + "GetFeastServingInfo", + return_value=GetFeastServingInfoResponse( + job_staging_location=f"file://{tempfile.mkdtemp()}/", + type=FeastServingType.FEAST_SERVING_TYPE_BATCH, + ), + ) + + response = mock_client.get_batch_features( + entity_rows=pd.DataFrame( + { + "datetime": [ + pd.datetime.now(tz=timezone("Asia/Singapore")) for _ in range(3) + ], + "customer": [1001, 1002, 1003], + "transaction": [1001, 1002, 1003], + } + ), + feature_ids=[ + "customer_fs:1:customer_feature_1", + "customer_fs:1:customer_feature_2", + ], + ) # type: Job + + assert response.id == "123" and response.status == JobStatus.JOB_STATUS_DONE + + actual_dataframe = response.to_dataframe() + + assert actual_dataframe[ + ["customer_fs:1:customer_feature_1", "customer_fs:1:customer_feature_2"] + ].equals( + expected_dataframe[ + ["customer_fs:1:customer_feature_1", "customer_fs:1:customer_feature_2"] + ] + ) + + def test_apply_feature_set_success(self, client): + + # Create Feature Sets + fs1 = FeatureSet("my-feature-set-1") + fs1.add(Feature(name="fs1-my-feature-1", dtype=ValueType.INT64)) + fs1.add(Feature(name="fs1-my-feature-2", dtype=ValueType.STRING)) + fs1.add(Entity(name="fs1-my-entity-1", dtype=ValueType.INT64)) + + fs2 = FeatureSet("my-feature-set-2") + fs2.add(Feature(name="fs2-my-feature-1", dtype=ValueType.STRING_LIST)) + fs2.add(Feature(name="fs2-my-feature-2", dtype=ValueType.BYTES_LIST)) + fs2.add(Entity(name="fs2-my-entity-1", dtype=ValueType.INT64)) + + # Register Feature Set with Core + client.apply(fs1) + client.apply(fs2) + + feature_sets = client.list_feature_sets() + + # List Feature Sets + assert ( + len(feature_sets) == 2 + and feature_sets[0].name == "my-feature-set-1" + and feature_sets[0].features[0].name == "fs1-my-feature-1" + and feature_sets[0].features[0].dtype == ValueType.INT64 + and feature_sets[1].features[1].dtype == ValueType.BYTES_LIST + ) + + @pytest.mark.parametrize("dataframe", [dataframes.GOOD]) + def test_feature_set_ingest_success(self, dataframe, client, mocker): + + driver_fs = FeatureSet("driver-feature-set") + driver_fs.add(Feature(name="feature_1", dtype=ValueType.FLOAT)) + driver_fs.add(Feature(name="feature_2", dtype=ValueType.STRING)) + driver_fs.add(Feature(name="feature_3", dtype=ValueType.INT64)) + driver_fs.add(Entity(name="entity_id", dtype=ValueType.INT64)) + + driver_fs.source = KafkaSource(topic="feature-topic", brokers="127.0.0.1") + + client._message_producer = MagicMock() + client._message_producer.produce = MagicMock() + + # Register with Feast core + client.apply(driver_fs) + + mocker.patch.object( + client._core_service_stub, + "GetFeatureSet", + return_value=GetFeatureSetResponse(feature_set=driver_fs.to_proto()), + ) + + # Ingest data into Feast + client.ingest("driver-feature-set", 
dataframe=dataframe) + + @pytest.mark.parametrize( + "dataframe,exception", + [ + (dataframes.BAD_NO_DATETIME, Exception), + (dataframes.BAD_INCORRECT_DATETIME_TYPE, Exception), + (dataframes.BAD_NO_ENTITY, Exception), + (dataframes.NO_FEATURES, Exception), + ], + ) + def test_feature_set_ingest_failure(self, client, dataframe, exception): + with pytest.raises(exception): + # Create feature set + driver_fs = FeatureSet("driver-feature-set") + driver_fs.source = KafkaSource( + topic="feature-topic", brokers="fake.broker.com" + ) + client._message_producer = MagicMock() + client._message_producer.produce = MagicMock() + + # Update based on dataset + driver_fs.infer_fields_from_df(dataframe) + + # Register with Feast core + client.apply(driver_fs) + + # Ingest data into Feast + client.ingest(driver_fs, dataframe=dataframe) + + @pytest.mark.parametrize("dataframe", [dataframes.ALL_TYPES]) + def test_feature_set_types_success(self, client, dataframe, mocker): + + all_types_fs = FeatureSet( + name="all_types", + entities=[Entity(name="user_id", dtype=ValueType.INT64)], + features=[ + Feature(name="float_feature", dtype=ValueType.FLOAT), + Feature(name="int64_feature", dtype=ValueType.INT64), + Feature(name="int32_feature", dtype=ValueType.INT32), + Feature(name="string_feature", dtype=ValueType.STRING), + Feature(name="bytes_feature", dtype=ValueType.BYTES), + Feature(name="bool_feature", dtype=ValueType.BOOL), + Feature(name="double_feature", dtype=ValueType.DOUBLE), + Feature(name="float_list_feature", dtype=ValueType.FLOAT_LIST), + Feature(name="int64_list_feature", dtype=ValueType.INT64_LIST), + Feature(name="int32_list_feature", dtype=ValueType.INT32_LIST), + Feature(name="string_list_feature", dtype=ValueType.STRING_LIST), + Feature(name="bytes_list_feature", dtype=ValueType.BYTES_LIST), + Feature(name="bool_list_feature", dtype=ValueType.BOOL_LIST), + Feature(name="double_list_feature", dtype=ValueType.DOUBLE_LIST), + ], + max_age=Duration(seconds=3600), + ) + + all_types_fs.source = KafkaSource(topic="feature-topic", brokers="127.0.0.1") + client._message_producer = MagicMock() + client._message_producer.produce = MagicMock() + + # Register with Feast core + client.apply(all_types_fs) + + mocker.patch.object( + client._core_service_stub, + "GetFeatureSet", + return_value=GetFeatureSetResponse(feature_set=all_types_fs.to_proto()), + ) + + # Ingest data into Feast + client.ingest(all_types_fs, dataframe=dataframe) diff --git a/sdk/python/tests/test_feature_set.py b/sdk/python/tests/test_feature_set.py new file mode 100644 index 00000000000..57d7a8f8100 --- /dev/null +++ b/sdk/python/tests/test_feature_set.py @@ -0,0 +1,169 @@ +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
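+# The tests below exercise FeatureSet field management (add/drop) and schema
+# inference via FeatureSet.infer_fields_from_df, which derives Feature and
+# Entity fields from a pandas dataframe. Each parametrized case pairs a sample
+# dataframe from the local `dataframes` helper module with the feature and
+# entity counts expected after inference; the discard_unused_fields flag
+# controls whether the pre-existing dummy fields on the feature set are kept.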
+from datetime import datetime + +import pytz + +from feast.entity import Entity +from feast.feature_set import FeatureSet, Feature +from feast.value_type import ValueType +from feast.client import Client +import pandas as pd +import pytest +from concurrent import futures +import grpc +from feast_core_server import CoreServicer +import feast.core.CoreService_pb2_grpc as Core +import dataframes + +CORE_URL = "core.feast.local" +SERVING_URL = "serving.feast.local" + + +class TestFeatureSet: + @pytest.fixture(scope="function") + def server(self): + server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) + Core.add_CoreServiceServicer_to_server(CoreServicer(), server) + server.add_insecure_port("[::]:50051") + server.start() + yield server + server.stop(0) + + @pytest.fixture + def client(self, server): + return Client(core_url="localhost:50051") + + def test_add_remove_features_success(self): + fs = FeatureSet("my-feature-set") + fs.add(Feature(name="my-feature-1", dtype=ValueType.INT64)) + fs.add(Feature(name="my-feature-2", dtype=ValueType.INT64)) + fs.drop(name="my-feature-1") + assert len(fs.features) == 1 and fs.features[0].name == "my-feature-2" + + def test_remove_feature_failure(self): + with pytest.raises(ValueError): + fs = FeatureSet("my-feature-set") + fs.drop(name="my-feature-1") + + def test_update_from_source_failure(self): + with pytest.raises(Exception): + df = pd.DataFrame() + fs = FeatureSet("driver-feature-set") + fs.infer_fields_from_df(df) + + @pytest.mark.parametrize( + "dataframe,feature_count,entity_count,discard_unused_fields,features,entities", + [ + ( + dataframes.GOOD, + 3, + 1, + True, + [], + [Entity(name="entity_id", dtype=ValueType.INT64)], + ), + ( + dataframes.GOOD_FIVE_FEATURES, + 5, + 1, + True, + [], + [Entity(name="entity_id", dtype=ValueType.INT64)], + ), + ( + dataframes.GOOD_FIVE_FEATURES, + 6, + 1, + True, + [Feature(name="feature_6", dtype=ValueType.INT64)], + [Entity(name="entity_id", dtype=ValueType.INT64)], + ), + ( + dataframes.GOOD_FIVE_FEATURES_TWO_ENTITIES, + 5, + 2, + True, + [], + [ + Entity(name="entity_1_id", dtype=ValueType.INT64), + Entity(name="entity_2_id", dtype=ValueType.INT64), + ], + ), + ( + dataframes.GOOD_FIVE_FEATURES_TWO_ENTITIES, + 6, + 3, + False, + [], + [ + Entity(name="entity_1_id", dtype=ValueType.INT64), + Entity(name="entity_2_id", dtype=ValueType.INT64), + ], + ), + ( + dataframes.NO_FEATURES, + 0, + 1, + True, + [], + [Entity(name="entity_id", dtype=ValueType.INT64)], + ), + ( + pd.DataFrame( + { + "datetime": [ + datetime.utcnow().replace(tzinfo=pytz.utc) for _ in range(3) + ] + } + ), + 0, + 0, + True, + [], + [], + ), + ], + ids=[ + "Test small dataframe update with hardcoded entity", + "Test larger dataframe update with hardcoded entity", + "Test larger dataframe update with hardcoded entity and feature", + "Test larger dataframe update with two hardcoded entities and discarding of existing fields", + "Test larger dataframe update with two hardcoded entities and retention of existing fields", + "Test dataframe with no featuresdataframe", + "Test empty dataframe", + ], + ) + def test_add_features_from_df_success( + self, + dataframe, + feature_count, + entity_count, + discard_unused_fields, + features, + entities, + ): + my_feature_set = FeatureSet( + name="my_feature_set", + features=[Feature(name="dummy_f1", dtype=ValueType.INT64)], + entities=[Entity(name="dummy_entity_1", dtype=ValueType.INT64)], + ) + my_feature_set.infer_fields_from_df( + dataframe, + discard_unused_fields=discard_unused_fields, + 
features=features, + entities=entities, + ) + assert len(my_feature_set.features) == feature_count + assert len(my_feature_set.entities) == entity_count diff --git a/sdk/python/tests/test_stores.py b/sdk/python/tests/test_stores.py new file mode 100644 index 00000000000..92445ecb7a7 --- /dev/null +++ b/sdk/python/tests/test_stores.py @@ -0,0 +1,60 @@ +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import pytest +import stores +from feast.feature_set import FeatureSet +from feast.feature import Feature +from feast.entity import Entity +from feast.value_type import ValueType +from feast.types import ( + FeatureRow_pb2 as FeatureRowProto, + Field_pb2 as FieldProto, + Value_pb2 as ValueProto, +) +from google.protobuf.timestamp_pb2 import Timestamp + + +class TestStores: + @pytest.fixture(scope="module") + def sqlite_store(self): + return stores.SQLiteDatabase() + + def test_register_feature_set(self, sqlite_store): + fs = FeatureSet("my-feature-set") + fs.add(Feature(name="my-feature-1", dtype=ValueType.INT64)) + fs.add(Feature(name="my-feature-2", dtype=ValueType.INT64)) + fs.add(Entity(name="my-entity-1", dtype=ValueType.INT64)) + fs._version = 1 + feature_set_proto = fs.to_proto() + + sqlite_store.register_feature_set(feature_set_proto) + feature_row = FeatureRowProto.FeatureRow( + feature_set="feature_set_1", + event_timestamp=Timestamp(), + fields=[ + FieldProto.Field( + name="feature_1", value=ValueProto.Value(float_val=1.2) + ), + FieldProto.Field( + name="feature_2", value=ValueProto.Value(float_val=1.2) + ), + FieldProto.Field( + name="feature_3", value=ValueProto.Value(float_val=1.2) + ), + ], + ) + # sqlite_store.upsert_feature_row(feature_set_proto, feature_row) + assert True diff --git a/serving/README.md b/serving/README.md new file mode 100644 index 00000000000..f88f30923b2 --- /dev/null +++ b/serving/README.md @@ -0,0 +1,93 @@ +### Getting Started Guide for Feast Serving Developers + +Pre-requisites: + +- [Maven](https://maven.apache.org/install.html) build tool version 3.6.x +- A running Feast Core instance +- A running Store instance e.g. 
local Redis Store instance + +From the Feast project root directory, run the following Maven command to start Feast Serving gRPC service running on port 6566 locally: + +```bash +# Assumptions: +# - Local Feast Core is running on localhost:6565 +mvn -pl serving spring-boot:run -Dspring-boot.run.arguments=\ +--feast.store.config-path=./sample_redis_config.yml,\ +--feast.core-host=localhost,\ +--feast.core-port=6565 +``` + +If you have [grpc_cli](https://github.com/grpc/grpc/blob/master/doc/command_line_tool.md) installed, you can check that Feast Serving is running +``` +grpc_cli ls localhost:6566 +grpc_cli call localhost:6566 GetFeastServingVersion '' +grpc_cli call localhost:6566 GetFeastServingType '' +``` + +```bash +grpc_cli call localhost:6565 ApplyFeatureSet ' +feature_set { + name: "driver" + version: 1 + entities { + name: "driver_id" + value_type: STRING + } + features { + name: "city" + value_type: STRING + } + features { + name: "booking_completed_count" + value_type: INT64 + } + source { + type: KAFKA + kafka_source_config { + bootstrap_servers: "localhost:9092" + } + } +} +' + +grpc_cli call localhost:6565 GetFeatureSets ' +filter { + feature_set_name: "driver" + feature_set_version: "1" +} +' + +grpc_cli call localhost:6566 GetBatchFeatures ' +feature_sets { + name: "driver" + version: 1 + feature_names: "booking_completed_count" + max_age { + seconds: 86400 + } +} +entity_dataset { + entity_names: "driver_id" + entity_dataset_rows { + entity_timestamp { + seconds: 1569873954 + } + } +} +' +``` + +``` +python3 < 4.0.0 + feast feast-parent ${revision} + feast-serving Feast Serving - jar + Feature serving API service - - - false - - central - bintray - https://jcenter.bintray.com - spring-plugins Spring Plugins @@ -49,6 +43,9 @@ org.springframework.boot spring-boot-maven-plugin + + false + org.xolstice.maven.plugins @@ -77,93 +74,82 @@ + + + org.slf4j + slf4j-api + + + + org.springframework.boot + spring-boot-configuration-processor + true + + org.springframework.boot spring-boot-starter-web - ${springBootVersion} - - - org.springframework.boot - spring-boot-starter-logging - - org.springframework.boot spring-boot-starter-log4j2 - ${springBootVersion} - + + + org.springframework.boot + spring-boot-devtools + true + + + - org.lognet + io.github.lognet grpc-spring-boot-starter - 2.4.1 org.springframework.boot spring-boot-starter-actuator - ${springBootVersion} - - - - io.grpc - grpc-netty - ${grpcVersion} - io.grpc grpc-services - ${grpcVersion} io.grpc grpc-stub - ${grpcVersion} com.google.protobuf protobuf-java-util - ${protobufVersion} - - - com.google.cloud.bigtable - bigtable-hbase-2.x-shaded - 1.5.0 + io.pebbletemplates + pebble + 3.1.0 + redis.clients jedis - 2.9.0 com.google.guava guava - 26.0-jre - - - - commons-codec - commons-codec - 1.10 joda-time joda-time - 2.9.9 @@ -177,60 +163,41 @@ 0.31.0 - io.opentracing.contrib - opentracing-concurrent - 0.2.0 + io.opentracing + opentracing-noop + 0.31.0 - - - - io.micrometer - micrometer-core - 1.0.7 - - + - io.micrometer - micrometer-registry-statsd - 1.0.7 + com.google.cloud + google-cloud-bigquery + - com.datadoghq - java-dogstatsd-client - 2.6.1 + com.google.cloud + google-cloud-storage org.projectlombok lombok - 1.18.2 - provided - io.grpc grpc-testing - ${grpcVersion} - test org.springframework.boot spring-boot-starter-test - ${springBootVersion} - test - - - - com.github.kstyrc - embedded-redis - 0.6 test + + org.mockito mockito-core @@ -238,14 +205,19 @@ test + - com.google.guava - guava-testlib - 26.0-jre - test + 
org.hibernate + hibernate-core + 5.4.5.Final - + + + com.fasterxml.jackson.dataformat + jackson-dataformat-yaml + + @@ -258,87 +230,26 @@ - io.fabric8 - docker-maven-plugin - 0.20.1 - - - prepare-bigtable-emulator - pre-integration-test - - start - - - - - spotify/bigtable-emulator - bigtable-emulator - - - 8080:8080 - - - - - - - - remove-bigtable-emulator - post-integration-test - - stop - - - - - - - - - profile-ci - - - ci - - - - - - io.fabric8 - docker-maven-plugin - 0.20.1 - - - prepare-bigtable-emulator - pre-integration-test - - start - - - - - spotify/bigtable-emulator - bigtable-emulator - - host - - 8080:8080 - - - - - - - - remove-bigtable-emulator - post-integration-test - - stop - - - + org.apache.maven.plugins + maven-resources-plugin + 3.1.0 + + + @ + + false + + + + + + src/main/resources + true + + + diff --git a/serving/sample_redis_config.yml b/serving/sample_redis_config.yml new file mode 100644 index 00000000000..d6008365e0f --- /dev/null +++ b/serving/sample_redis_config.yml @@ -0,0 +1,8 @@ +name: serving +type: REDIS +redis_config: + host: localhost + port: 6379 +subscriptions: + - name: "*" + version: ">0" diff --git a/serving/src/main/java/feast/serving/FeastProperties.java b/serving/src/main/java/feast/serving/FeastProperties.java new file mode 100644 index 00000000000..73cc91479fd --- /dev/null +++ b/serving/src/main/java/feast/serving/FeastProperties.java @@ -0,0 +1,48 @@ +package feast.serving; + +// Feast configuration properties that maps Feast configuration from default application.yml file to +// a Java object. +// https://www.baeldung.com/configuration-properties-in-spring-boot +// https://docs.spring.io/spring-boot/docs/current/reference/html/boot-features-external-config.html#boot-features-external-config-typesafe-configuration-properties + +import java.util.Map; +import lombok.Getter; +import lombok.Setter; +import org.springframework.boot.context.properties.ConfigurationProperties; + +@Getter +@Setter +@ConfigurationProperties(prefix = "feast") +public class FeastProperties { + private String version; + private String coreHost; + private int coreGrpcPort; + private StoreProperties store; + private JobProperties jobs; + private TracingProperties tracing; + + @Setter + @Getter + public static class StoreProperties { + private String configPath; + private int redisPoolMaxSize; + private int redisPoolMaxIdle; + } + + @Setter + @Getter + public static class JobProperties { + private String stagingLocation; + private String storeType; + private Map storeOptions; + } + + @Setter + @Getter + public static class TracingProperties { + private boolean enabled; + private String tracerName; + private String serviceName; + } + +} diff --git a/serving/src/main/java/feast/serving/ServingApiApplication.java b/serving/src/main/java/feast/serving/ServingApplication.java similarity index 78% rename from serving/src/main/java/feast/serving/ServingApiApplication.java rename to serving/src/main/java/feast/serving/ServingApplication.java index 891e4b319ed..3357996128c 100644 --- a/serving/src/main/java/feast/serving/ServingApiApplication.java +++ b/serving/src/main/java/feast/serving/ServingApplication.java @@ -19,11 +19,12 @@ import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.context.properties.EnableConfigurationProperties; @SpringBootApplication -public class ServingApiApplication { - +@EnableConfigurationProperties(FeastProperties.class) +public class ServingApplication 
{ public static void main(String[] args) { - SpringApplication.run(ServingApiApplication.class, args); + SpringApplication.run(ServingApplication.class, args); } } diff --git a/serving/src/main/java/feast/serving/config/AppConfig.java b/serving/src/main/java/feast/serving/config/AppConfig.java deleted file mode 100644 index 77b56116ee9..00000000000 --- a/serving/src/main/java/feast/serving/config/AppConfig.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.config; - -import lombok.Builder; -import lombok.Value; - -@Value -@Builder -public class AppConfig { - // maximum size of pool - int redisMaxPoolSize; - - // number of connection allowed to be idle - int redisMaxIdleSize; - - // max number of entity per thread. - int maxEntityPerBatch; - - // timeout for feature retrieval - int timeout; -} diff --git a/serving/src/main/java/feast/serving/config/GrpcServerConfigurer.java b/serving/src/main/java/feast/serving/config/GrpcServerConfigurer.java deleted file mode 100644 index 9368c9dfdac..00000000000 --- a/serving/src/main/java/feast/serving/config/GrpcServerConfigurer.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.config; - -import io.grpc.ServerBuilder; -import java.util.concurrent.ExecutorService; -import org.lognet.springboot.grpc.GRpcServerBuilderConfigurer; -import org.springframework.stereotype.Component; - -/** - * Configuration for Grpc server. - */ -@Component -public class GrpcServerConfigurer extends GRpcServerBuilderConfigurer { - - private final ExecutorService executorService; - - public GrpcServerConfigurer( - ExecutorService executorService) { - this.executorService = executorService; - } - - @Override - public void configure(ServerBuilder serverBuilder) { - serverBuilder.executor(executorService); - } -} diff --git a/serving/src/main/java/feast/serving/config/InstrumentationConfig.java b/serving/src/main/java/feast/serving/config/InstrumentationConfig.java deleted file mode 100644 index 519f3d30258..00000000000 --- a/serving/src/main/java/feast/serving/config/InstrumentationConfig.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.config; - -import com.timgroup.statsd.NonBlockingStatsDClient; -import com.timgroup.statsd.StatsDClient; -import io.micrometer.core.instrument.MeterRegistry; -import io.opentracing.Tracer; -import java.net.InetAddress; -import java.net.UnknownHostException; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.boot.actuate.autoconfigure.metrics.MeterRegistryCustomizer; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -@Configuration -public class InstrumentationConfig { - private static final String APP_NAME = "feast_serving"; - - @Bean - public StatsDClient getStatsDClient(@Value("${statsd.host}") String host, - @Value("${statsd.port}") int port) { - return new NonBlockingStatsDClient(APP_NAME, host, port); - } - - @Bean - public Tracer getTracer() { - io.jaegertracing.Configuration tracingConfig = - io.jaegertracing.Configuration.fromEnv(APP_NAME); - return tracingConfig.getTracer(); - } - - @Bean - MeterRegistryCustomizer metricsCommonTags() { - return registry -> { - try { - registry.config().commonTags("hostname", InetAddress.getLocalHost().getHostName()); - } catch (UnknownHostException e) { - registry.config().commonTags("hostname", APP_NAME); - } - }; - } -} \ No newline at end of file diff --git a/serving/src/main/java/feast/serving/config/ServingApiConfiguration.java b/serving/src/main/java/feast/serving/config/ServingApiConfiguration.java deleted file mode 100644 index 3b3ae6f35a9..00000000000 --- a/serving/src/main/java/feast/serving/config/ServingApiConfiguration.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.serving.config; - -import com.google.common.base.Strings; -import com.google.common.util.concurrent.ListeningExecutorService; -import com.google.common.util.concurrent.MoreExecutors; -import com.google.gson.Gson; -import com.google.gson.reflect.TypeToken; -import feast.serving.service.CachedSpecStorage; -import feast.serving.service.CoreService; -import feast.serving.service.FeatureStorageRegistry; -import feast.serving.service.SpecStorage; -import feast.specs.StorageSpecProto.StorageSpec; -import io.opentracing.Tracer; -import io.opentracing.contrib.concurrent.TracedExecutorService; -import java.lang.reflect.Type; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; -import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.http.converter.HttpMessageConverter; -import org.springframework.http.converter.protobuf.ProtobufJsonFormatHttpMessageConverter; -import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; - -/** - * Global bean configuration. - */ -@Slf4j -@Configuration -public class ServingApiConfiguration implements WebMvcConfigurer { - - @Autowired - private ProtobufJsonFormatHttpMessageConverter protobufConverter; - private ScheduledExecutorService scheduledExecutorService = - Executors.newSingleThreadScheduledExecutor(); - - private static Map convertJsonStringToMap(String jsonString) { - if (jsonString == null || jsonString.equals("") || jsonString.equals("{}")) { - return Collections.emptyMap(); - } - Type stringMapType = new TypeToken>() { - }.getType(); - return new Gson().fromJson(jsonString, stringMapType); - } - - - @Bean - public AppConfig getAppConfig( - @Value("${feast.redispool.maxsize}") int redisPoolMaxSize, - @Value("${feast.redispool.maxidle}") int redisPoolMaxIdle, - @Value("${feast.maxentity}") int maxEntityPerBatch, - @Value("${feast.timeout}") int timeout) { - return AppConfig.builder() - .maxEntityPerBatch(maxEntityPerBatch) - .redisMaxPoolSize(redisPoolMaxSize) - .redisMaxIdleSize(redisPoolMaxIdle) - .timeout(timeout) - .build(); - } - - @Bean - public SpecStorage getCoreServiceSpecStorage( - @Value("${feast.core.host}") String coreServiceHost, - @Value("${feast.core.grpc.port}") String coreServicePort, - @Value("${feast.cacheDurationMinute}") int cacheDurationMinute) { - final CachedSpecStorage cachedSpecStorage = - new CachedSpecStorage(new CoreService(coreServiceHost, Integer.parseInt(coreServicePort))); - - // reload all specs including new ones periodically - scheduledExecutorService.schedule( - cachedSpecStorage::populateCache, cacheDurationMinute, TimeUnit.MINUTES); - - // load all specs during start up - try { - cachedSpecStorage.populateCache(); - } catch (Exception e) { - log.error("Unable to preload feast's spec"); - } - return cachedSpecStorage; - } - - @Bean - public FeatureStorageRegistry getFeatureStorageRegistry( - @Value("${feast.store.serving.type}") String storageType, - @Value("${feast.store.serving.options}") String storageOptions, - AppConfig appConfig, Tracer tracer) { - storageOptions = Strings.isNullOrEmpty(storageOptions) ? 
"{}" : storageOptions; - Map optionsMap = convertJsonStringToMap(storageOptions); - - StorageSpec storageSpec = StorageSpec.newBuilder() - .setId("SERVING") - .setType(storageType) - .putAllOptions(optionsMap) - .build(); - - FeatureStorageRegistry registry = new FeatureStorageRegistry(appConfig, tracer); - try { - registry.connect(storageSpec); - } catch (Exception e) { - log.error( - "Unable to create a pre-populated storage registry, connection will be made in ad-hoc basis", - e); - } - return registry; - } - - @Bean - public ListeningExecutorService getExecutorService( - Tracer tracer, @Value("${feast.threadpool.max}") int maxPoolSize) { - - ExecutorService executor = Executors.newFixedThreadPool(maxPoolSize); - return MoreExecutors.listeningDecorator(new TracedExecutorService(executor, tracer)); - } - - @Bean - ProtobufJsonFormatHttpMessageConverter protobufHttpMessageConverter() { - return new ProtobufJsonFormatHttpMessageConverter(); - } - - @Override - public void configureMessageConverters(List> converters) { - converters.add(protobufConverter); - } -} diff --git a/serving/src/main/java/feast/serving/configuration/ContextClosedHandler.java b/serving/src/main/java/feast/serving/configuration/ContextClosedHandler.java new file mode 100644 index 00000000000..aa4faee6fc3 --- /dev/null +++ b/serving/src/main/java/feast/serving/configuration/ContextClosedHandler.java @@ -0,0 +1,20 @@ +package feast.serving.configuration; + +import java.util.concurrent.ScheduledExecutorService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationListener; +import org.springframework.context.event.ContextClosedEvent; +import org.springframework.stereotype.Component; + +@Component +public class ContextClosedHandler implements ApplicationListener { + + @Autowired + ScheduledExecutorService executor; + + @Override + public void onApplicationEvent(ContextClosedEvent event) { + executor.shutdown(); + } +} + diff --git a/serving/src/main/java/feast/serving/configuration/InstrumentationConfig.java b/serving/src/main/java/feast/serving/configuration/InstrumentationConfig.java new file mode 100644 index 00000000000..b475369c4db --- /dev/null +++ b/serving/src/main/java/feast/serving/configuration/InstrumentationConfig.java @@ -0,0 +1,32 @@ +package feast.serving.configuration; + +import feast.serving.FeastProperties; +import io.opentracing.Tracer; +import io.opentracing.noop.NoopTracerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +public class InstrumentationConfig { + private FeastProperties feastProperties; + + @Autowired + public InstrumentationConfig(FeastProperties feastProperties) { + this.feastProperties = feastProperties; + } + + @Bean + public Tracer tracer() { + if (!feastProperties.getTracing().isEnabled()) { + return NoopTracerFactory.create(); + } + + if (!feastProperties.getTracing().getTracerName().equalsIgnoreCase("jaeger")) { + throw new IllegalArgumentException("Only 'jaeger' tracer is supported for now."); + } + + return io.jaegertracing.Configuration.fromEnv(feastProperties.getTracing().getServiceName()) + .getTracer(); + } +} diff --git a/serving/src/main/java/feast/serving/configuration/JobServiceConfig.java b/serving/src/main/java/feast/serving/configuration/JobServiceConfig.java new file mode 100644 index 00000000000..de80ff14e62 --- /dev/null +++ 
b/serving/src/main/java/feast/serving/configuration/JobServiceConfig.java @@ -0,0 +1,41 @@ +package feast.serving.configuration; + +import feast.core.StoreProto.Store; +import feast.core.StoreProto.Store.RedisConfig; +import feast.core.StoreProto.Store.StoreType; +import feast.serving.service.CachedSpecService; +import feast.serving.service.JobService; +import feast.serving.service.NoopJobService; +import feast.serving.service.RedisBackedJobService; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import redis.clients.jedis.Jedis; + +@Configuration +public class JobServiceConfig { + + @Bean + public JobService jobService(Store jobStore, + CachedSpecService specService) { + if (!specService.getStore().getType().equals(StoreType.BIGQUERY)) { + return new NoopJobService(); + } + + switch (jobStore.getType()) { + case REDIS: + RedisConfig redisConfig = jobStore.getRedisConfig(); + Jedis jedis = new Jedis(redisConfig.getHost(), redisConfig.getPort()); + return new RedisBackedJobService(jedis); + case INVALID: + case BIGQUERY: + case CASSANDRA: + case UNRECOGNIZED: + default: + throw new IllegalArgumentException( + String.format( + "Unsupported store type '%s' for job store name '%s'", jobStore.getType(), + jobStore.getName())); + } + } +} diff --git a/serving/src/main/java/feast/serving/configuration/ServingApiConfiguration.java b/serving/src/main/java/feast/serving/configuration/ServingApiConfiguration.java new file mode 100644 index 00000000000..a3babf36b4e --- /dev/null +++ b/serving/src/main/java/feast/serving/configuration/ServingApiConfiguration.java @@ -0,0 +1,25 @@ +package feast.serving.configuration; + +import java.util.List; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.http.converter.HttpMessageConverter; +import org.springframework.http.converter.protobuf.ProtobufJsonFormatHttpMessageConverter; +import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; + +@Configuration +public class ServingApiConfiguration implements WebMvcConfigurer { + @Autowired + private ProtobufJsonFormatHttpMessageConverter protobufConverter; + + @Bean + ProtobufJsonFormatHttpMessageConverter protobufHttpMessageConverter() { + return new ProtobufJsonFormatHttpMessageConverter(); + } + + @Override + public void configureMessageConverters(List> converters) { + converters.add(protobufConverter); + } +} diff --git a/serving/src/main/java/feast/serving/configuration/ServingServiceConfig.java b/serving/src/main/java/feast/serving/configuration/ServingServiceConfig.java new file mode 100644 index 00000000000..ac5263ec7a8 --- /dev/null +++ b/serving/src/main/java/feast/serving/configuration/ServingServiceConfig.java @@ -0,0 +1,133 @@ +package feast.serving.configuration; + +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.storage.Storage; +import com.google.cloud.storage.StorageOptions; +import feast.core.StoreProto.Store; +import feast.core.StoreProto.Store.BigQueryConfig; +import feast.core.StoreProto.Store.Builder; +import feast.core.StoreProto.Store.RedisConfig; +import feast.core.StoreProto.Store.StoreType; +import feast.core.StoreProto.Store.Subscription; +import feast.serving.FeastProperties; +import 
feast.serving.FeastProperties.JobProperties; +import feast.serving.service.BigQueryServingService; +import feast.serving.service.CachedSpecService; +import feast.serving.service.JobService; +import feast.serving.service.NoopJobService; +import feast.serving.service.RedisServingService; +import feast.serving.service.ServingService; +import io.opentracing.Tracer; +import java.util.Map; +import lombok.extern.slf4j.Slf4j; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import redis.clients.jedis.JedisPool; +import redis.clients.jedis.JedisPoolConfig; + +@Slf4j +@Configuration +public class ServingServiceConfig { + + @Bean(name="JobStore") + public Store jobStoreDefinition(FeastProperties feastProperties) { + JobProperties jobProperties = feastProperties.getJobs(); + if (feastProperties.getJobs().getStoreType().equals("")) { + return Store.newBuilder().build(); + } + Map options = jobProperties.getStoreOptions(); + Builder storeDefinitionBuilder = Store.newBuilder() + .setType(StoreType.valueOf(jobProperties.getStoreType())); + return setStoreConfig(storeDefinitionBuilder, options); + } + + private Store setStoreConfig(Store.Builder builder, Map options) { + switch (builder.getType()) { + case REDIS: + RedisConfig redisConfig = RedisConfig.newBuilder() + .setHost(options.get("host")) + .setPort(Integer.parseInt(options.get("port"))) + .build(); + return builder.setRedisConfig(redisConfig).build(); + case BIGQUERY: + BigQueryConfig bqConfig = BigQueryConfig.newBuilder() + .setProjectId(options.get("projectId")) + .setDatasetId(options.get("datasetId")) + .build(); + return builder.setBigqueryConfig(bqConfig).build(); + case CASSANDRA: + default: + throw new IllegalArgumentException(String.format( + "Unsupported store %s provided, only REDIS or BIGQUERY are currently supported.", + builder.getType())); + } + } + + + @Bean + public ServingService servingService( + FeastProperties feastProperties, + CachedSpecService specService, + JobService jobService, + Tracer tracer) { + ServingService servingService = null; + Store store = specService.getStore(); + + switch (store.getType()) { + case REDIS: + RedisConfig redisConfig = store.getRedisConfig(); + JedisPoolConfig poolConfig = new JedisPoolConfig(); + poolConfig.setMaxTotal(feastProperties.getStore().getRedisPoolMaxSize()); + poolConfig.setMaxIdle(feastProperties.getStore().getRedisPoolMaxIdle()); + JedisPool jedisPool = + new JedisPool( + poolConfig, redisConfig.getHost(), redisConfig.getPort()); + servingService = new RedisServingService(jedisPool, specService, tracer); + break; + case BIGQUERY: + BigQueryConfig bqConfig = store.getBigqueryConfig(); + BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); + Storage storage = StorageOptions.getDefaultInstance().getService(); + String jobStagingLocation = feastProperties.getJobs().getStagingLocation(); + if (!jobStagingLocation.contains("://")) { + throw new IllegalArgumentException( + String.format("jobStagingLocation is not a valid URI: %s", jobStagingLocation)); + } + if (jobStagingLocation.endsWith("/")) { + jobStagingLocation = jobStagingLocation.substring(0, jobStagingLocation.length() - 1); + } + if (!jobStagingLocation.startsWith("gs://")) { + throw new IllegalArgumentException( + "Store type BIGQUERY requires job staging location to be a valid and existing Google Cloud Storage URI. 
Invalid staging location: " + + jobStagingLocation); + } + if (jobService.getClass() == NoopJobService.class) { + throw new IllegalArgumentException("Unable to instantiate jobService for BigQuery store."); + } + servingService = + new BigQueryServingService( + bigquery, + bqConfig.getProjectId(), + bqConfig.getDatasetId(), + specService, + jobService, + jobStagingLocation, + storage); + break; + case CASSANDRA: + case UNRECOGNIZED: + case INVALID: + throw new IllegalArgumentException( + String.format( + "Unsupported store type '%s' for store name '%s'", store.getType(), store.getName())); + } + + return servingService; + } + + private Subscription parseSubscription(String subscription) { + String[] split = subscription.split(":"); + return Subscription.newBuilder().setName(split[0]).setVersion(split[1]).build(); + } +} diff --git a/serving/src/main/java/feast/serving/configuration/SpecServiceConfig.java b/serving/src/main/java/feast/serving/configuration/SpecServiceConfig.java new file mode 100644 index 00000000000..9c7c74f3ca3 --- /dev/null +++ b/serving/src/main/java/feast/serving/configuration/SpecServiceConfig.java @@ -0,0 +1,57 @@ +package feast.serving.configuration; + +import feast.serving.FeastProperties; +import feast.serving.service.CachedSpecService; +import feast.serving.service.CoreSpecService; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Slf4j +@Configuration +public class SpecServiceConfig { + + private String feastCoreHost; + private int feastCorePort; + private static final int CACHE_REFRESH_RATE_MINUTES = 1; + + + @Autowired + public SpecServiceConfig(FeastProperties feastProperties) { + feastCoreHost = feastProperties.getCoreHost(); + feastCorePort = feastProperties.getCoreGrpcPort(); + } + + @Bean + public ScheduledExecutorService cachedSpecServiceScheduledExecutorService( + CachedSpecService cachedSpecStorage) { + ScheduledExecutorService scheduledExecutorService = Executors + .newSingleThreadScheduledExecutor(); + // reload all specs including new ones periodically + scheduledExecutorService + .scheduleAtFixedRate(cachedSpecStorage::scheduledPopulateCache, CACHE_REFRESH_RATE_MINUTES, + CACHE_REFRESH_RATE_MINUTES, TimeUnit.MINUTES); + return scheduledExecutorService; + } + + @Bean + public CachedSpecService specService(FeastProperties feastProperties) { + + CoreSpecService coreService = new CoreSpecService(feastCoreHost, feastCorePort); + Path path = Paths.get(feastProperties.getStore().getConfigPath()); + CachedSpecService cachedSpecStorage = + new CachedSpecService(coreService, path); + try { + cachedSpecStorage.populateCache(); + } catch (Exception e) { + log.error("Unable to preload feast's spec"); + } + return cachedSpecStorage; + } +} diff --git a/serving/src/main/java/feast/serving/controller/HealthServiceController.java b/serving/src/main/java/feast/serving/controller/HealthServiceController.java new file mode 100644 index 00000000000..b9ae4503030 --- /dev/null +++ b/serving/src/main/java/feast/serving/controller/HealthServiceController.java @@ -0,0 +1,46 @@ +package feast.serving.controller; + +import feast.core.StoreProto.Store; +import feast.serving.ServingAPIProto.GetFeastServingInfoRequest; +import 
feast.serving.service.CachedSpecService; +import feast.serving.service.ServingService; +import io.grpc.health.v1.HealthGrpc.HealthImplBase; +import io.grpc.health.v1.HealthProto.HealthCheckRequest; +import io.grpc.health.v1.HealthProto.HealthCheckResponse; +import io.grpc.health.v1.HealthProto.HealthCheckResponse.ServingStatus; +import io.grpc.stub.StreamObserver; +import org.lognet.springboot.grpc.GRpcService; +import org.springframework.beans.factory.annotation.Autowired; + +// Reference: https://github.com/grpc/grpc/blob/master/doc/health-checking.md + +@GRpcService +public class HealthServiceController extends HealthImplBase { + private CachedSpecService specService; + private ServingService servingService; + + @Autowired + public HealthServiceController(CachedSpecService specService, ServingService servingService) { + this.specService = specService; + this.servingService = servingService; + } + + @Override + public void check( + HealthCheckRequest request, StreamObserver responseObserver) { + // TODO: Implement proper logic to determine if ServingService is healthy, e.g. + // if it is an online service, check that the service can retrieve a dummy/random feature set. + // Implement similarly for the batch service. + + try { + Store store = specService.getStore(); + servingService.getFeastServingInfo(GetFeastServingInfoRequest.getDefaultInstance()); + responseObserver.onNext( + HealthCheckResponse.newBuilder().setStatus(ServingStatus.SERVING).build()); + } catch (Exception e) { + responseObserver.onNext( + HealthCheckResponse.newBuilder().setStatus(ServingStatus.NOT_SERVING).build()); + } + responseObserver.onCompleted(); + } +} diff --git a/serving/src/main/java/feast/serving/controller/ServingServiceGRpcController.java b/serving/src/main/java/feast/serving/controller/ServingServiceGRpcController.java new file mode 100644 index 00000000000..1a51bb5cda9 --- /dev/null +++ b/serving/src/main/java/feast/serving/controller/ServingServiceGRpcController.java @@ -0,0 +1,87 @@ +package feast.serving.controller; + +import feast.serving.FeastProperties; +import feast.serving.ServingAPIProto.GetBatchFeaturesRequest; +import feast.serving.ServingAPIProto.GetBatchFeaturesResponse; +import feast.serving.ServingAPIProto.GetFeastServingInfoRequest; +import feast.serving.ServingAPIProto.GetFeastServingInfoResponse; +import feast.serving.ServingAPIProto.GetJobRequest; +import feast.serving.ServingAPIProto.GetJobResponse; +import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; +import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; +import feast.serving.ServingServiceGrpc.ServingServiceImplBase; +import feast.serving.service.ServingService; +import feast.serving.util.RequestHelper; +import io.grpc.stub.StreamObserver; +import io.opentracing.Scope; +import io.opentracing.Span; +import io.opentracing.Tracer; +import lombok.extern.slf4j.Slf4j; +import org.lognet.springboot.grpc.GRpcService; +import org.springframework.beans.factory.annotation.Autowired; + +@Slf4j +@GRpcService +public class ServingServiceGRpcController extends ServingServiceImplBase { + private final ServingService servingService; + private final String version; + private final Tracer tracer; + + @Autowired + public ServingServiceGRpcController( + ServingService servingService, FeastProperties feastProperties, Tracer tracer) { + this.servingService = servingService; + this.version = feastProperties.getVersion(); + this.tracer = tracer; + } + + @Override + public void getFeastServingInfo(GetFeastServingInfoRequest request, +
StreamObserver responseObserver) { + GetFeastServingInfoResponse feastServingInfo = servingService.getFeastServingInfo(request); + feastServingInfo = feastServingInfo.toBuilder() + .setVersion(version) + .build(); + responseObserver.onNext(feastServingInfo); + responseObserver.onCompleted(); + } + + @Override + public void getOnlineFeatures( + GetOnlineFeaturesRequest request, StreamObserver responseObserver) { + Span span = tracer.buildSpan("getOnlineFeatures").start(); + try (Scope scope = tracer.scopeManager().activate(span, false)) { + RequestHelper.validateOnlineRequest(request); + GetOnlineFeaturesResponse onlineFeatures = servingService.getOnlineFeatures(request); + responseObserver.onNext(onlineFeatures); + responseObserver.onCompleted(); + } catch (Exception e) { + responseObserver.onError(e); + } + span.finish(); + } + + @Override + public void getBatchFeatures( + GetBatchFeaturesRequest request, StreamObserver responseObserver) { + try { + RequestHelper.validateBatchRequest(request); + GetBatchFeaturesResponse batchFeatures = servingService.getBatchFeatures(request); + responseObserver.onNext(batchFeatures); + responseObserver.onCompleted(); + } catch (Exception e) { + responseObserver.onError(e); + } + } + + @Override + public void getJob(GetJobRequest request, StreamObserver responseObserver) { + try { + GetJobResponse response = servingService.getJob(request); + responseObserver.onNext(response); + responseObserver.onCompleted(); + } catch (Exception e) { + responseObserver.onError(e); + } + } +} diff --git a/serving/src/main/java/feast/serving/controller/ServingServiceRestController.java b/serving/src/main/java/feast/serving/controller/ServingServiceRestController.java new file mode 100644 index 00000000000..2eeb9014633 --- /dev/null +++ b/serving/src/main/java/feast/serving/controller/ServingServiceRestController.java @@ -0,0 +1,52 @@ +package feast.serving.controller; + +import static feast.serving.util.mappers.ResponseJSONMapper.mapGetOnlineFeaturesResponse; + +import feast.serving.FeastProperties; +import feast.serving.ServingAPIProto.GetFeastServingInfoRequest; +import feast.serving.ServingAPIProto.GetFeastServingInfoResponse; +import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; +import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; +import feast.serving.service.ServingService; +import feast.serving.util.RequestHelper; +import io.opentracing.Tracer; +import java.util.List; +import java.util.Map; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +@RestController +public class ServingServiceRestController { + + private final ServingService servingService; + private final String version; + private final Tracer tracer; + + @Autowired + public ServingServiceRestController( + ServingService servingService, FeastProperties feastProperties, Tracer tracer) { + this.servingService = servingService; + this.version = feastProperties.getVersion(); + this.tracer = tracer; + } + + @RequestMapping(value = "/api/v1/info", produces = "application/json") + public GetFeastServingInfoResponse getInfo() { + GetFeastServingInfoResponse feastServingInfo = servingService + .getFeastServingInfo(GetFeastServingInfoRequest.getDefaultInstance()); + return feastServingInfo.toBuilder().setVersion(version).build(); + } + + @RequestMapping( + value = "/api/v1/features/online", + 
produces = "application/json", + consumes = "application/json") + public List> getOnlineFeatures( + @RequestBody GetOnlineFeaturesRequest request) { + RequestHelper.validateOnlineRequest(request); + GetOnlineFeaturesResponse onlineFeatures = servingService.getOnlineFeatures(request); + return mapGetOnlineFeaturesResponse(onlineFeatures); + } +} diff --git a/serving/src/main/java/feast/serving/exception/FeatureRetrievalException.java b/serving/src/main/java/feast/serving/exception/FeatureRetrievalException.java deleted file mode 100644 index 5770aa337a6..00000000000 --- a/serving/src/main/java/feast/serving/exception/FeatureRetrievalException.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.exception; - -/** Application-specific exception for any failure of retrieving feature from Feast Storage. */ -public class FeatureRetrievalException extends RuntimeException { - public FeatureRetrievalException() { - super(); - } - - public FeatureRetrievalException(String message) { - super(message); - } - - public FeatureRetrievalException(String message, Throwable cause) { - super(message, cause); - } -} diff --git a/serving/src/main/java/feast/serving/grpc/RemoteAddressInterceptor.java b/serving/src/main/java/feast/serving/grpc/RemoteAddressInterceptor.java deleted file mode 100644 index 73c9c1cf265..00000000000 --- a/serving/src/main/java/feast/serving/grpc/RemoteAddressInterceptor.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ -package feast.serving.grpc; - -import static feast.serving.util.StatsUtil.REMOTE_ADDRESS; -import static io.grpc.Grpc.TRANSPORT_ATTR_REMOTE_ADDR; - -import io.grpc.Context; -import io.grpc.Contexts; -import io.grpc.Metadata; -import io.grpc.ServerCall; -import io.grpc.ServerCall.Listener; -import io.grpc.ServerCallHandler; -import io.grpc.ServerInterceptor; -import java.net.SocketAddress; -import lombok.extern.slf4j.Slf4j; -import org.lognet.springboot.grpc.GRpcGlobalInterceptor; - -/** - * Interceptor for retrieving {@link io.grpc.Grpc#TRANSPORT_ATTR_REMOTE_ADDR} and storing it in - * current Context. 
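The new gRPC and REST controllers above replace the ServingGrpcService and Spring MVC endpoints removed in the surrounding hunks. For reference, a minimal client against the new gRPC surface might look like the sketch below; it assumes the standard grpc-java stubs generated from the serving proto (ServingServiceGrpc and its blocking stub), and the host and port are placeholders only.

import feast.serving.ServingAPIProto.GetFeastServingInfoRequest;
import feast.serving.ServingAPIProto.GetFeastServingInfoResponse;
import feast.serving.ServingServiceGrpc;
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;

public class ServingInfoClientExample {
  public static void main(String[] args) {
    // Placeholder address; the serving gRPC port depends on the deployment.
    ManagedChannel channel =
        ManagedChannelBuilder.forAddress("localhost", 6566).usePlaintext().build();
    ServingServiceGrpc.ServingServiceBlockingStub stub =
        ServingServiceGrpc.newBlockingStub(channel);
    // getFeastServingInfo returns the serving type plus the version injected by the controller.
    GetFeastServingInfoResponse info =
        stub.getFeastServingInfo(GetFeastServingInfoRequest.getDefaultInstance());
    System.out.println(info);
    channel.shutdown();
  }
}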
- */ -@Slf4j -@GRpcGlobalInterceptor -public class RemoteAddressInterceptor implements ServerInterceptor { - - @Override - public Listener interceptCall(ServerCall call, Metadata headers, - ServerCallHandler next) { - Context context = Context.current(); - try { - SocketAddress client = call.getAttributes().get(TRANSPORT_ATTR_REMOTE_ADDR); - context = context.withValue(REMOTE_ADDRESS, client); - } catch (Exception e) { - log.error("Unable to get remote address", e); - } - return Contexts.interceptCall(context, call, headers, next); - } -} diff --git a/serving/src/main/java/feast/serving/grpc/ServingGrpcService.java b/serving/src/main/java/feast/serving/grpc/ServingGrpcService.java deleted file mode 100644 index 51622d8ea65..00000000000 --- a/serving/src/main/java/feast/serving/grpc/ServingGrpcService.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.grpc; - -import static feast.serving.util.RequestHelper.validateRequest; -import static feast.serving.util.StatsUtil.makeStatsdTags; -import static io.grpc.Status.Code.INTERNAL; - -import com.timgroup.statsd.StatsDClient; -import feast.serving.ServingAPIGrpc.ServingAPIImplBase; -import feast.serving.ServingAPIProto.QueryFeaturesRequest; -import feast.serving.ServingAPIProto.QueryFeaturesResponse; -import feast.serving.service.FeastServing; -import feast.serving.util.TimeUtil; -import io.grpc.Status; -import io.grpc.StatusRuntimeException; -import io.grpc.stub.StreamObserver; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.Tracer; -import lombok.extern.slf4j.Slf4j; -import org.lognet.springboot.grpc.GRpcService; -import org.springframework.beans.factory.annotation.Autowired; - -/** Grpc service implementation for Serving API. */ -@Slf4j -@GRpcService -public class ServingGrpcService extends ServingAPIImplBase { - - private final FeastServing feast; - private final Tracer tracer; - private final StatsDClient statsDClient; - - @Autowired - public ServingGrpcService(FeastServing feast, Tracer tracer, StatsDClient statsDClient) { - this.feast = feast; - this.tracer = tracer; - this.statsDClient = statsDClient; - } - - /** Query feature values from Feast storage. 
*/ - @Override - public void queryFeatures(QueryFeaturesRequest request, StreamObserver responseObserver) { - long currentMicro = TimeUtil.microTime(); - Span span = tracer.buildSpan("ServingGrpcService-queryFeatures").start(); - String[] tags = makeStatsdTags(request); - statsDClient.increment("query_features_count", tags); - statsDClient.gauge("query_features_entity_count", request.getEntityIdCount(), tags); - statsDClient.gauge("query_features_feature_count", request.getFeatureIdCount(), tags); - try (Scope scope = tracer.scopeManager().activate(span, false)) { - Span innerSpan = scope.span(); - validateRequest(request); - QueryFeaturesResponse response = feast.queryFeatures(request); - - innerSpan.log("calling onNext"); - responseObserver.onNext(response); - innerSpan.log("calling onCompleted"); - responseObserver.onCompleted(); - innerSpan.log("all done"); - statsDClient.increment("query_feature_success", tags); - } catch (Exception e) { - statsDClient.increment("query_feature_failed", tags); - log.error("Error: {}", e.getMessage()); - responseObserver.onError( - new StatusRuntimeException( - Status.fromCode(INTERNAL).withDescription(e.getMessage()).withCause(e))); - } finally { - statsDClient.gauge("query_features_latency_us", TimeUtil.microTime() - currentMicro, tags); - span.finish(); - } - } -} diff --git a/serving/src/main/java/feast/serving/http/Error.java b/serving/src/main/java/feast/serving/http/Error.java deleted file mode 100644 index ff3ecd9e525..00000000000 --- a/serving/src/main/java/feast/serving/http/Error.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.http; - -import lombok.AllArgsConstructor; -import lombok.Getter; -import org.springframework.http.HttpStatus; - -/** Class representing detailed error information. */ -@Getter -@AllArgsConstructor -public class Error { - /** - * Translation of http status code. - */ - private HttpStatus status; - - /** - * Detailed error message. - */ - private String message; - -} diff --git a/serving/src/main/java/feast/serving/http/HealthHttpService.java b/serving/src/main/java/feast/serving/http/HealthHttpService.java deleted file mode 100644 index 45de3adab6f..00000000000 --- a/serving/src/main/java/feast/serving/http/HealthHttpService.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.serving.http; - -import feast.serving.service.SpecStorage; -import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RestController; - -/** HTTP end-point for kubernetes health check. */ -@RestController -@Slf4j -public class HealthHttpService { - - private final SpecStorage specStorage; - - @Autowired - public HealthHttpService(SpecStorage specStorage) { - this.specStorage = specStorage; - } - - @RequestMapping("/ping") - public String ping() { - return "pong"; - } - - @RequestMapping("/healthz") - public String healthz() { - if (specStorage.isConnected()) { - return "healthy"; - } - log.error("not ready: unable to connect to core service"); - throw new IllegalStateException("not ready: unable to connect to core service"); - } -} diff --git a/serving/src/main/java/feast/serving/http/HttpExceptionHandler.java b/serving/src/main/java/feast/serving/http/HttpExceptionHandler.java deleted file mode 100644 index d29fbba7781..00000000000 --- a/serving/src/main/java/feast/serving/http/HttpExceptionHandler.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.http; - -import feast.serving.exception.FeatureRetrievalException; -import feast.serving.exception.SpecRetrievalException; -import org.springframework.core.Ordered; -import org.springframework.core.annotation.Order; -import org.springframework.http.HttpHeaders; -import org.springframework.http.HttpStatus; -import org.springframework.http.ResponseEntity; -import org.springframework.web.bind.annotation.ControllerAdvice; -import org.springframework.web.bind.annotation.ExceptionHandler; -import org.springframework.web.context.request.WebRequest; -import org.springframework.web.servlet.mvc.method.annotation.ResponseEntityExceptionHandler; - -import java.util.NoSuchElementException; - -/** - * Exception Handler to translate exception thrown by the application into appropriate error - * response. 
- */ -@Order(Ordered.HIGHEST_PRECEDENCE) -@ControllerAdvice -public class HttpExceptionHandler extends ResponseEntityExceptionHandler { - @ExceptionHandler(NoSuchElementException.class) - protected ResponseEntity handleNoSuchElementException(NoSuchElementException ex) { - Error error = new Error(HttpStatus.NOT_FOUND, ex.getMessage()); - return buildResponseEntity(error); - } - - @ExceptionHandler(SpecRetrievalException.class) - protected ResponseEntity handleSpecRetrievalException(SpecRetrievalException ex) { - Error error = new Error(HttpStatus.INTERNAL_SERVER_ERROR, ex.getMessage()); - return buildResponseEntity(error); - } - - @ExceptionHandler(FeatureRetrievalException.class) - protected ResponseEntity handleFeatureRetrievalException(FeatureRetrievalException ex) { - Error error = new Error(HttpStatus.INTERNAL_SERVER_ERROR, ex.getMessage()); - return buildResponseEntity(error); - } - - @ExceptionHandler(IllegalStateException.class) - protected ResponseEntity handleIllegalStateException(IllegalStateException ex) { - Error error = new Error(HttpStatus.INTERNAL_SERVER_ERROR, ex.getMessage()); - return buildResponseEntity(error); - } - - @ExceptionHandler(IllegalArgumentException.class) - protected ResponseEntity handleIllegalArgumentException(IllegalArgumentException ex) { - Error error = new Error(HttpStatus.BAD_REQUEST, ex.getMessage()); - return buildResponseEntity(error); - } - - @Override - protected ResponseEntity handleExceptionInternal( - Exception ex, Object body, HttpHeaders headers, HttpStatus status, WebRequest request) { - Error error = new Error(status, ex.getMessage()); - return buildResponseEntity(error); - } - - protected ResponseEntity buildResponseEntity(Error error) { - return new ResponseEntity(error, error.getStatus()); - } -} diff --git a/serving/src/main/java/feast/serving/http/ServingHttpService.java b/serving/src/main/java/feast/serving/http/ServingHttpService.java deleted file mode 100644 index 48164a9d495..00000000000 --- a/serving/src/main/java/feast/serving/http/ServingHttpService.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.http; - -import static feast.serving.util.RequestHelper.validateRequest; - -import feast.serving.ServingAPIProto.QueryFeaturesRequest; -import feast.serving.ServingAPIProto.QueryFeaturesResponse; -import feast.serving.service.FeastServing; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RestController; - -/** HTTP endpoint for Serving API. 
*/ -@RestController -public class ServingHttpService { - private final FeastServing feastServing; - - @Autowired - public ServingHttpService(FeastServing feastServing) { - this.feastServing = feastServing; - } - - @RequestMapping( - value = "/api/v1/features/request", - produces = "application/json", - consumes = "application/json") - public QueryFeaturesResponse queryFeature(@RequestBody QueryFeaturesRequest request) { - validateRequest(request); - return feastServing.queryFeatures(request); - } -} diff --git a/serving/src/main/java/feast/serving/model/FeatureValue.java b/serving/src/main/java/feast/serving/model/FeatureValue.java deleted file mode 100644 index 7d65a7c1c11..00000000000 --- a/serving/src/main/java/feast/serving/model/FeatureValue.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.model; - -import com.google.protobuf.Timestamp; -import feast.types.ValueProto; -import lombok.AllArgsConstructor; -import lombok.Value; - -/** Class representing a feature value in a given time. */ -@Value -@AllArgsConstructor -public class FeatureValue { - /** Feature Id of the feature */ - String featureId; - - /** Entity Id of the feature */ - String entityId; - - /** Value of the feature. 
*/ - ValueProto.Value value; - - /** Timestamp of the feature.*/ - Timestamp timestamp; -} diff --git a/serving/src/main/java/feast/serving/service/BigQueryServingService.java b/serving/src/main/java/feast/serving/service/BigQueryServingService.java new file mode 100644 index 00000000000..9d93d4dd06c --- /dev/null +++ b/serving/src/main/java/feast/serving/service/BigQueryServingService.java @@ -0,0 +1,326 @@ +package feast.serving.service; + +import static feast.serving.util.BigQueryUtil.getTimestampLimitQuery; + +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.ExtractJobConfiguration; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldValueList; +import com.google.cloud.bigquery.FormatOptions; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobInfo; +import com.google.cloud.bigquery.LoadJobConfiguration; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.Table; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.TableResult; +import com.google.cloud.storage.Blob; +import com.google.cloud.storage.Storage; +import com.google.cloud.storage.Storage.BlobListOption; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.serving.ServingAPIProto; +import feast.serving.ServingAPIProto.DataFormat; +import feast.serving.ServingAPIProto.DatasetSource; +import feast.serving.ServingAPIProto.FeastServingType; +import feast.serving.ServingAPIProto.GetBatchFeaturesRequest; +import feast.serving.ServingAPIProto.GetBatchFeaturesResponse; +import feast.serving.ServingAPIProto.GetFeastServingInfoRequest; +import feast.serving.ServingAPIProto.GetFeastServingInfoResponse; +import feast.serving.ServingAPIProto.GetJobRequest; +import feast.serving.ServingAPIProto.GetJobResponse; +import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; +import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; +import feast.serving.ServingAPIProto.JobStatus; +import feast.serving.ServingAPIProto.JobType; +import feast.serving.util.BigQueryUtil; +import io.grpc.Status; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import java.util.UUID; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class BigQueryServingService implements ServingService { + + private static final Long TABLE_EXPIRATION_TIME = 172800000L; + + private final BigQuery bigquery; + private final String projectId; + private final String datasetId; + private final CachedSpecService specService; + private final JobService jobService; + private final String jobStagingLocation; + private final Storage storage; + + public BigQueryServingService( + BigQuery bigquery, + String projectId, + String datasetId, + CachedSpecService specService, + JobService jobService, + String jobStagingLocation, + Storage storage) { + this.bigquery = bigquery; + this.projectId = projectId; + this.datasetId = datasetId; + this.specService = specService; + this.jobService = jobService; + this.jobStagingLocation = jobStagingLocation; + this.storage = storage; + } + + /** + * {@inheritDoc} + */ + @Override + public GetFeastServingInfoResponse getFeastServingInfo( + GetFeastServingInfoRequest getFeastServingInfoRequest) { + return GetFeastServingInfoResponse.newBuilder() + 
.setType(FeastServingType.FEAST_SERVING_TYPE_BATCH) + .setJobStagingLocation(jobStagingLocation) + .build(); + } + + /** + * {@inheritDoc} + */ + @Override + public GetOnlineFeaturesResponse getOnlineFeatures(GetOnlineFeaturesRequest getFeaturesRequest) { + throw Status.UNIMPLEMENTED.withDescription("Method not implemented").asRuntimeException(); + } + + /** + * {@inheritDoc} + */ + @Override + public GetBatchFeaturesResponse getBatchFeatures(GetBatchFeaturesRequest getFeaturesRequest) { + + List featureSetSpecs = + getFeaturesRequest.getFeatureSetsList().stream() + .map(featureSet -> + specService.getFeatureSet(featureSet.getName(), featureSet.getVersion()) + ) + .collect(Collectors.toList()); + + if (getFeaturesRequest.getFeatureSetsList().size() != featureSetSpecs.size()) { + throw Status.INVALID_ARGUMENT + .withDescription( + "Some of the feature sets requested do not exist in Feast. Please check your request payload.") + .asRuntimeException(); + } + + Table entityTable = loadEntities(getFeaturesRequest.getDatasetSource()); + String entityTableName = entityTable.getTableId().getTable(); + //TODO: add expiration to temp tables +// entityTable = entityTable.toBuilder().setExpirationTime(TABLE_EXPIRATION_TIME).build(); +// entityTable.update(TableOption.fields(TableField.EXPIRATION_TIME)); + FieldValueList timestampLimits = getTimestampLimits(entityTableName); + + Schema entityTableSchema = entityTable.getDefinition().getSchema(); + List entityNames = entityTableSchema.getFields().stream() + .map(Field::getName) + .filter(name -> !name.equals("event_timestamp")) + .collect(Collectors.toList()); + + String query; + try { + query = + BigQueryUtil.createQuery( + getFeaturesRequest.getFeatureSetsList(), + featureSetSpecs, + entityNames, + projectId, + datasetId, + entityTableName, + timestampLimits.get("min").getStringValue(), + timestampLimits.get("max").getStringValue()); + log.info("Running BigQuery query: {}", query); + } catch (IOException e) { + throw new RuntimeException("Unable to generate query for batch retrieval"); + } + + String feastJobId = UUID.randomUUID().toString(); + ServingAPIProto.Job feastJob = + ServingAPIProto.Job.newBuilder() + .setId(feastJobId) + .setType(JobType.JOB_TYPE_DOWNLOAD) + .setStatus(JobStatus.JOB_STATUS_PENDING) + .build(); + jobService.upsert(feastJob); + + new Thread( + () -> { + QueryJobConfiguration queryConfig; + Job queryJob; + + try { + queryConfig = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DatasetId.of(projectId, datasetId)) + .build(); + queryJob = bigquery.create(JobInfo.of(queryConfig)); + jobService.upsert( + ServingAPIProto.Job.newBuilder() + .setId(feastJobId) + .setType(JobType.JOB_TYPE_DOWNLOAD) + .setStatus(JobStatus.JOB_STATUS_RUNNING) + .build()); + queryJob.waitFor(); + } catch (BigQueryException | InterruptedException e) { + jobService.upsert( + ServingAPIProto.Job.newBuilder() + .setId(feastJobId) + .setType(JobType.JOB_TYPE_DOWNLOAD) + .setStatus(JobStatus.JOB_STATUS_DONE) + .setError(e.getMessage()) + .build()); + return; + } + + try { + queryConfig = queryJob.getConfiguration(); + String exportTableDestinationUri = + String.format("%s/%s/*.avro", jobStagingLocation, feastJobId); + + // Hardcode the format to Avro for now + ExtractJobConfiguration extractConfig = + ExtractJobConfiguration.of( + queryConfig.getDestinationTable(), exportTableDestinationUri, "Avro"); + Job extractJob = bigquery.create(JobInfo.of(extractConfig)); + extractJob.waitFor(); + } catch (BigQueryException | InterruptedException e) { + 
jobService.upsert( + ServingAPIProto.Job.newBuilder() + .setId(feastJobId) + .setType(JobType.JOB_TYPE_DOWNLOAD) + .setStatus(JobStatus.JOB_STATUS_DONE) + .setError(e.getMessage()) + .build()); + return; + } + + String scheme = jobStagingLocation.substring(0, jobStagingLocation.indexOf("://")); + String stagingLocationNoScheme = + jobStagingLocation.substring(jobStagingLocation.indexOf("://") + 3); + String bucket = stagingLocationNoScheme.split("/")[0]; + List prefixParts = new ArrayList<>(); + prefixParts.add( + stagingLocationNoScheme.contains("/") && !stagingLocationNoScheme.endsWith("/") + ? stagingLocationNoScheme.substring(stagingLocationNoScheme.indexOf("/") + 1) + : ""); + prefixParts.add(feastJobId); + String prefix = String.join("/", prefixParts) + "/"; + + List fileUris = new ArrayList<>(); + for (Blob blob : storage.list(bucket, BlobListOption.prefix(prefix)).iterateAll()) { + fileUris.add(String.format("%s://%s/%s", scheme, blob.getBucket(), blob.getName())); + } + + jobService.upsert( + ServingAPIProto.Job.newBuilder() + .setId(feastJobId) + .setType(JobType.JOB_TYPE_DOWNLOAD) + .setStatus(JobStatus.JOB_STATUS_DONE) + .addAllFileUris(fileUris) + .setDataFormat(DataFormat.DATA_FORMAT_AVRO) + .build()); + }) + .start(); + + return GetBatchFeaturesResponse.newBuilder().setJob(feastJob).build(); + } + + private FieldValueList getTimestampLimits(String entityTableName) { + QueryJobConfiguration getTimestampLimitsQuery = QueryJobConfiguration + .newBuilder(getTimestampLimitQuery(projectId, datasetId, entityTableName)) + .setDefaultDataset(DatasetId.of(projectId, datasetId)).build(); + try { + Job job = bigquery + .create(JobInfo.of(getTimestampLimitsQuery)); + TableResult getTimestampLimitsQueryResult = job + .waitFor() + .getQueryResults(); + FieldValueList result = null; + for (FieldValueList fields : getTimestampLimitsQueryResult.getValues()) { + result = fields; + } + if (result == null || result.get("min").isNull() || result.get("max").isNull()) { + throw new RuntimeException("query returned insufficient values"); + } + return result; + } catch (InterruptedException e) { + throw Status.INTERNAL + .withDescription("Unable to extract min and max timestamps from query") + .withCause(e) + .asRuntimeException(); + } + } + + /** + * {@inheritDoc} + */ + @Override + public GetJobResponse getJob(GetJobRequest getJobRequest) { + Optional job = jobService.get(getJobRequest.getJob().getId()); + if (!job.isPresent()) { + throw Status.NOT_FOUND + .withDescription(String.format("Job not found: %s", getJobRequest.getJob().getId())) + .asRuntimeException(); + } + return GetJobResponse.newBuilder().setJob(job.get()).build(); + } + + private Table loadEntities(DatasetSource datasetSource) { + switch (datasetSource.getDatasetSourceCase()) { + case FILE_SOURCE: + try { + String tableName = generateTemporaryTableName(); + log.info("Loading entity dataset to table {}.{}.{}", projectId, datasetId, tableName); + TableId tableId = TableId.of(projectId, datasetId, tableName); + // Currently only avro supported + if (datasetSource.getFileSource().getDataFormat() != DataFormat.DATA_FORMAT_AVRO) { + throw Status.INVALID_ARGUMENT + .withDescription("Invalid file format, only avro supported") + .asRuntimeException(); + } + LoadJobConfiguration loadJobConfiguration = LoadJobConfiguration.of(tableId, + datasetSource.getFileSource().getFileUrisList(), + FormatOptions.avro()); + loadJobConfiguration = loadJobConfiguration.toBuilder() + .setUseAvroLogicalTypes(true) + .build(); + Job job = 
bigquery.create(JobInfo.of(loadJobConfiguration)); + job.waitFor(); + Table entityTable = bigquery.getTable(tableId); + if (!entityTable.exists()) { + throw new RuntimeException("Unable to create entity dataset table"); + } + return entityTable; + } catch (Exception e) { + log.error("Exception has occurred in loadEntities method: ", e); + throw Status.INTERNAL + .withDescription("Failed to load entity dataset into store") + .withCause(e) + .asRuntimeException(); + } + case DATASETSOURCE_NOT_SET: + default: + throw Status.INVALID_ARGUMENT + .withDescription("Data source must be set.") + .asRuntimeException(); + } + } + + private String generateTemporaryTableName() { + String source = String.format("feastserving%d", System.currentTimeMillis()); + String guid = UUID.nameUUIDFromBytes(source.getBytes()).toString(); + String suffix = guid.substring(0, Math.min(guid.length(), 10)).replaceAll("-", ""); + return String.format("temp_%s", suffix); + } +} diff --git a/serving/src/main/java/feast/serving/service/BigTableFeatureStorage.java b/serving/src/main/java/feast/serving/service/BigTableFeatureStorage.java deleted file mode 100644 index c2681a6ce23..00000000000 --- a/serving/src/main/java/feast/serving/service/BigTableFeatureStorage.java +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.service; - -import com.google.cloud.bigtable.hbase.BigtableConfiguration; -import com.google.common.base.Preconditions; -import com.google.protobuf.Timestamp; -import com.google.protobuf.util.Timestamps; -import feast.serving.exception.FeatureRetrievalException; -import feast.serving.model.FeatureValue; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.storage.BigTableProto.BigTableRowKey; -import feast.types.ValueProto.Value; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import lombok.AllArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.codec.digest.DigestUtils; -import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hbase.client.Get; -import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.client.Table; - -/** - * Connector to BigTable instance. 
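Note that getBatchFeatures above returns immediately with a Job handle while the BigQuery query and Avro extract run on a background thread; failed jobs are also marked JOB_STATUS_DONE but carry an error message. A client-side polling loop consistent with that contract could look like the following sketch, assuming the generated ServingServiceBlockingStub; the request construction and poll interval are illustrative only.

import feast.serving.ServingAPIProto;
import feast.serving.ServingAPIProto.GetBatchFeaturesResponse;
import feast.serving.ServingAPIProto.GetJobRequest;
import feast.serving.ServingAPIProto.JobStatus;
import feast.serving.ServingServiceGrpc.ServingServiceBlockingStub;

public class BatchJobPoller {
  // Polls the job returned by getBatchFeatures until the serving service marks it done.
  public static ServingAPIProto.Job waitForJob(
      ServingServiceBlockingStub stub, GetBatchFeaturesResponse response)
      throws InterruptedException {
    ServingAPIProto.Job job = response.getJob();
    while (job.getStatus() != JobStatus.JOB_STATUS_DONE) {
      Thread.sleep(1000); // arbitrary poll interval
      job = stub.getJob(GetJobRequest.newBuilder().setJob(job).build()).getJob();
    }
    if (!job.getError().isEmpty()) {
      throw new RuntimeException("Batch retrieval failed: " + job.getError());
    }
    // On success the job lists the Avro files staged under <jobStagingLocation>/<jobId>/.
    job.getFileUrisList().forEach(System.out::println);
    return job;
  }
}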
- */ -@Slf4j -public class BigTableFeatureStorage implements FeatureStorage { - - public static final String TYPE = "bigtable"; - private static final String DEFAULT_COLUMN_FAMILY = "default"; - public static String OPT_BIGTABLE_PROJECT = "project"; - public static String OPT_BIGTABLE_INSTANCE = "instance"; - public static String OPT_BIGTABLE_TABLE_PREFIX = "tablePrefix"; - public static String STORAGE_OPT_BIGTABLE_COLUMN_FAMILY = "family"; - public static String FEATURE_OPT_BIGTABLE_COLUMN_FAMILY = "bigtable.family"; - - private final StorageSpec storageSpec; - private final BigTableConnectionFactory connectionFactory; - private transient Connection connection; - - - public BigTableFeatureStorage(StorageSpec storageSpec) { - Preconditions.checkArgument(storageSpec.getType().equals(TYPE)); - this.storageSpec = storageSpec; - this.connectionFactory = new BigTableConnectionFactory(storageSpec); - } - - /** - * For tests - */ - public BigTableFeatureStorage(StorageSpec storageSpec, - BigTableConnectionFactory connectionFactory) { - Preconditions.checkArgument(storageSpec.getType().equals(TYPE)); - this.storageSpec = storageSpec; - this.connectionFactory = connectionFactory; - } - - protected Connection getConnection() { - if (connection == null) { - connection = connectionFactory.connect(); - } - return connection; - } - - /** - * {@inheritDoc} - */ - @Override - public List getFeature( - String entityName, Collection entityIds, Collection featureSpecs) { - List featureValues = new ArrayList<>(entityIds.size() * featureSpecs.size()); - for (FeatureSpec featureSpec : featureSpecs) { - featureValues.addAll(getCurrentFeatureInternal(entityName, entityIds, featureSpec)); - } - return featureValues; - } - - /** - * Internal implementation of get current value of a feature for a list of entity Ids. - * - * @param entityName entity name. - * @param entityIds list of entity id. - * @param featureSpec spec of the feature. - * @return list of feature value. - */ - private List getCurrentFeatureInternal( - String entityName, Collection entityIds, FeatureSpec featureSpec) { - List features = new ArrayList<>(entityIds.size()); - String featureId = featureSpec.getId(); - byte[] featureIdBytes = featureSpec.getId().getBytes(); - List gets = createGets(entityIds, featureSpec); - try (Table table = getConnection().getTable(TableName.valueOf(entityName))) { - Result[] results = table.get(gets); - for (Result result : results) { - Cell currentCell = result.getColumnLatestCell(getColumnFamily(featureSpec), featureIdBytes); - if (currentCell == null) { - continue; - } - - byte[] rawRowKey = currentCell.getRowArray(); - if (rawRowKey == null) { - continue; - } - - BigTableRowKey rowKey = BigTableRowKey.parseFrom(rawRowKey); - String entityId = rowKey.getEntityKey(); - byte[] rawCellValue = currentCell.getValueArray(); - - if (rawCellValue == null) { - continue; - } - - Timestamp timestamp = Timestamps.fromMillis(currentCell.getTimestamp()); - Value value = Value.parseFrom(rawCellValue); - FeatureValue featureValue = new FeatureValue(featureId, entityId, value, timestamp); - features.add(featureValue); - } - return features; - } catch (IOException e) { - log.error("Error while retrieving feature from BigTable", e); - throw new FeatureRetrievalException("Error while retrieving feature from BigTable", e); - } - } - - /** - * Create list of get operation for retrieving a feature of several entities optionally filtered - * by its timestamp. - * - * @param entityIds list of entity ID. 
- * @param featureSpec feature spec - * @return list of Get operation. - */ - private List createGets(Collection entityIds, FeatureSpec featureSpec) { - byte[] featureIdBytes = featureSpec.getId().getBytes(); - byte[] columnFamily = getColumnFamily(featureSpec); - List gets = new ArrayList<>(); - for (String entityId : entityIds) { - String entityIdPrefix = DigestUtils.sha1Hex(entityId.getBytes()).substring(0, 7); - BigTableRowKey btKey = createRowKey(entityIdPrefix, entityId, "0"); - Get get = new Get(btKey.toByteArray()); - get.addColumn(columnFamily, featureIdBytes); - try { - get.readVersions(1); - } catch (IOException e) { - log.error("should not happen"); - } - gets.add(get); - } - return gets; - } - - /** - * Create BigTableRowKey based on entityId, and timestamp. - * - * @param entityIdPrefix hash prefix of entity ID. - * @param entityId entity ID value - * @param reversedMillisTimestamp reversed timestamp value. - * @return instance of {@link BigTableRowKey} assocciated with the entity ID. - */ - private BigTableRowKey createRowKey( - String entityIdPrefix, String entityId, String reversedMillisTimestamp) { - return BigTableRowKey.newBuilder() - .setSha1Prefix(entityIdPrefix) - .setEntityKey(entityId) - .setReversedMillis(reversedMillisTimestamp) - .build(); - } - - /** - * Get column family of a feature from its spec. - * - * @param fs feature's spec - * @return byte array value of the column family. - */ - private byte[] getColumnFamily(FeatureSpec fs) { - String family = - fs.getOptionsOrDefault(FEATURE_OPT_BIGTABLE_COLUMN_FAMILY, null); - if (family == null) { - family = storageSpec - .getOptionsOrDefault(STORAGE_OPT_BIGTABLE_COLUMN_FAMILY, DEFAULT_COLUMN_FAMILY); - } - return family.getBytes(); - } - - @AllArgsConstructor - public static class BigTableConnectionFactory { - - private StorageSpec storageSpec; - - Connection connect() { - return BigtableConfiguration.connect( - storageSpec.getOptionsOrThrow(BigTableFeatureStorage.OPT_BIGTABLE_PROJECT), - storageSpec.getOptionsOrThrow(BigTableFeatureStorage.OPT_BIGTABLE_INSTANCE)); - } - } -} diff --git a/serving/src/main/java/feast/serving/service/CachedSpecService.java b/serving/src/main/java/feast/serving/service/CachedSpecService.java new file mode 100644 index 00000000000..df08fb7b384 --- /dev/null +++ b/serving/src/main/java/feast/serving/service/CachedSpecService.java @@ -0,0 +1,165 @@ +package feast.serving.service; + +import static feast.serving.util.mappers.YamlToProtoMapper.yamlToStoreProto; + +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.CacheLoader.InvalidCacheLoadException; +import com.google.common.cache.LoadingCache; +import feast.core.CoreServiceProto.ListFeatureSetsRequest; +import feast.core.CoreServiceProto.ListFeatureSetsRequest.Filter; +import feast.core.CoreServiceProto.ListFeatureSetsResponse; +import feast.core.CoreServiceProto.UpdateStoreRequest; +import feast.core.CoreServiceProto.UpdateStoreResponse; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.StoreProto.Store; +import feast.core.StoreProto.Store.Subscription; +import feast.serving.exception.SpecRetrievalException; +import io.grpc.StatusRuntimeException; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import lombok.extern.slf4j.Slf4j; + +/** + * In-memory cache of specs. 
+ */ +@Slf4j +public class CachedSpecService { + + private static final int MAX_SPEC_COUNT = 1000; + + private final CoreSpecService coreService; + private final Path configPath; + + private final CacheLoader featureSetSpecCacheLoader; + private final LoadingCache featureSetSpecCache; + private Store store; + + public CachedSpecService(CoreSpecService coreService, Path configPath) { + this.configPath = configPath; + this.coreService = coreService; + this.store = updateStore(readConfig(configPath)); + + Map featureSetSpecs = getFeatureSetSpecMap(); + featureSetSpecCacheLoader = + CacheLoader.from( + (String key) -> featureSetSpecs.get(key)); + featureSetSpecCache = + CacheBuilder.newBuilder().maximumSize(MAX_SPEC_COUNT).build(featureSetSpecCacheLoader); + } + + /** + * Get the current store configuration. + * + * @return StoreProto.Store store configuration for this serving instance + */ + public Store getStore() { + return this.store; + } + + /** + * Get a single FeatureSetSpec matching the given name and version. + * + * @param name of the featureSet + * @param version to retrieve + * @return FeatureSetSpec of the matching FeatureSet + */ + public FeatureSetSpec getFeatureSet(String name, int version) { + String id = String.format("%s:%d", name, version); + try { + return featureSetSpecCache.get(id); + } catch (InvalidCacheLoadException e) { + // if not found, try to retrieve from core + ListFeatureSetsRequest request = ListFeatureSetsRequest.newBuilder() + .setFilter(Filter.newBuilder() + .setFeatureSetName(name) + .setFeatureSetVersion(String.valueOf(version))) + .build(); + ListFeatureSetsResponse featureSets = coreService.listFeatureSets(request); + if (featureSets.getFeatureSetsList().size() == 0) { + throw new SpecRetrievalException( + String.format( + "Unable to retrieve featureSet with id %s from core, featureSet does not exist", + id)); + } + return featureSets.getFeatureSets(0); + } catch (ExecutionException e) { + throw new SpecRetrievalException( + String.format("Unable to retrieve featureSet with id %s", id), e); + } + } + + /** + * Reload the store configuration from the given config path, then retrieve the necessary specs + * from core to preload the cache. 
+ */ + public void populateCache() { + this.store = updateStore(readConfig(configPath)); + Map featureSetSpecMap = getFeatureSetSpecMap(); + featureSetSpecCache.putAll(featureSetSpecMap); + } + + public void scheduledPopulateCache() { + try { + populateCache(); + } catch (Exception e) { + log.warn("Error updating store configuration and specs: {}", e.getMessage()); + } + } + + private Map getFeatureSetSpecMap() { + HashMap featureSetSpecs = new HashMap<>(); + + for (Subscription subscription : this.store.getSubscriptionsList()) { + try { + ListFeatureSetsResponse featureSetsResponse = coreService + .listFeatureSets(ListFeatureSetsRequest.newBuilder() + .setFilter( + ListFeatureSetsRequest.Filter.newBuilder() + .setFeatureSetName(subscription.getName()) + .setFeatureSetVersion(subscription.getVersion()) + ).build()); + + for (FeatureSetSpec featureSetSpec : featureSetsResponse.getFeatureSetsList()) { + featureSetSpecs + .put(String.format("%s:%s", featureSetSpec.getName(), featureSetSpec.getVersion()), + featureSetSpec); + } + } catch (StatusRuntimeException e) { + throw new RuntimeException( + String.format("Unable to retrieve specs matching subscription %s", subscription), e); + } + } + return featureSetSpecs; + } + + private Store readConfig(Path path) { + try { + List fileContents = Files.readAllLines(path); + String yaml = fileContents.stream().reduce("", (l1, l2) -> l1 + "\n" + l2); + log.info("loaded store config at {}: \n{}", path.toString(), yaml); + return yamlToStoreProto(yaml); + } catch (IOException e) { + throw new RuntimeException( + String.format("Unable to read store config at %s", path.toAbsolutePath()), e); + } + } + + private Store updateStore(Store store) { + UpdateStoreRequest request = UpdateStoreRequest.newBuilder().setStore(store).build(); + try { + UpdateStoreResponse updateStoreResponse = coreService.updateStore(request); + if (!updateStoreResponse.getStore().equals(store)) { + throw new RuntimeException("Core store config not matching current store config"); + } + return updateStoreResponse.getStore(); + } catch (Exception e) { + throw new RuntimeException("Unable to update store configuration", e); + } + } +} diff --git a/serving/src/main/java/feast/serving/service/CachedSpecStorage.java b/serving/src/main/java/feast/serving/service/CachedSpecStorage.java deleted file mode 100644 index 8745d7f9835..00000000000 --- a/serving/src/main/java/feast/serving/service/CachedSpecStorage.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.serving.service; - -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; -import feast.serving.exception.SpecRetrievalException; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.StorageSpecProto.StorageSpec; -import java.util.Collections; -import java.util.Map; -import lombok.extern.slf4j.Slf4j; - -/** - * SpecStorage implementation with built-in in-memory cache. - */ -@Slf4j -public class CachedSpecStorage implements SpecStorage { - - private static final int MAX_SPEC_COUNT = 1000; - - private final CoreService coreService; - private final LoadingCache entitySpecCache; - private final CacheLoader entitySpecLoader; - private final LoadingCache featureSpecCache; - private final CacheLoader featureSpecLoader; - private StorageSpec storageSpec; - - public CachedSpecStorage(CoreService coreService) { - this.coreService = coreService; - entitySpecLoader = - CacheLoader.from( - (String key) -> coreService.getEntitySpecs(Collections.singletonList(key)).get(key)); - entitySpecCache = CacheBuilder.newBuilder().maximumSize(MAX_SPEC_COUNT).build(entitySpecLoader); - - featureSpecLoader = - CacheLoader.from( - (String key) -> coreService.getFeatureSpecs(Collections.singletonList(key)).get(key)); - featureSpecCache = - CacheBuilder.newBuilder().maximumSize(MAX_SPEC_COUNT).build(featureSpecLoader); - } - - @Override - public Map getEntitySpecs(Iterable entityIds) { - try { - return entitySpecCache.getAll(entityIds); - } catch (Exception e) { - log.error("Error while retrieving entity spec: {}", e); - throw new SpecRetrievalException("Error while retrieving entity spec", e); - } - } - - @Override - public Map getAllEntitySpecs() { - try { - Map result = coreService.getAllEntitySpecs(); - entitySpecCache.putAll(result); - return result; - } catch (Exception e) { - log.error("Error while retrieving entity spec: {}", e); - throw new SpecRetrievalException("Error while retrieving entity spec", e); - } - } - - @Override - public Map getFeatureSpecs(Iterable featureIds) { - try { - return featureSpecCache.getAll(featureIds); - } catch (Exception e) { - log.error("Error while retrieving feature spec: {}", e); - throw new SpecRetrievalException("Error while retrieving feature spec", e); - } - } - - @Override - public boolean isConnected() { - return coreService.isConnected(); - } - - /** - * Preload all spec into cache. - */ - public void populateCache() { - Map featureSpecMap = coreService.getAllFeatureSpecs(); - featureSpecCache.putAll(featureSpecMap); - - Map entitySpecMap = coreService.getAllEntitySpecs(); - entitySpecCache.putAll(entitySpecMap); - } -} diff --git a/serving/src/main/java/feast/serving/service/CoreService.java b/serving/src/main/java/feast/serving/service/CoreService.java deleted file mode 100644 index 6ab9a103efe..00000000000 --- a/serving/src/main/java/feast/serving/service/CoreService.java +++ /dev/null @@ -1,156 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.service; - -import com.google.protobuf.Empty; -import feast.core.CoreServiceGrpc; -import feast.core.CoreServiceProto.CoreServiceTypes.GetEntitiesRequest; -import feast.core.CoreServiceProto.CoreServiceTypes.GetEntitiesResponse; -import feast.core.CoreServiceProto.CoreServiceTypes.GetFeaturesRequest; -import feast.core.CoreServiceProto.CoreServiceTypes.GetFeaturesResponse; -import feast.core.CoreServiceProto.CoreServiceTypes.ListEntitiesResponse; -import feast.core.CoreServiceProto.CoreServiceTypes.ListFeaturesResponse; -import feast.serving.exception.SpecRetrievalException; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureSpecProto.FeatureSpec; -import io.grpc.ConnectivityState; -import io.grpc.ManagedChannel; -import io.grpc.ManagedChannelBuilder; -import io.grpc.StatusRuntimeException; -import java.util.Map; -import java.util.concurrent.TimeUnit; -import java.util.function.Function; -import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; - -/** - * Class responsible for retrieving Feature, Entity, and Storage Spec from Feast Core service. - */ -@Slf4j -public class CoreService implements SpecStorage { - - private final ManagedChannel channel; - private final CoreServiceGrpc.CoreServiceBlockingStub blockingStub; - - public CoreService(String host, int port) { - this(ManagedChannelBuilder.forAddress(host, port)); - } - - public CoreService(ManagedChannelBuilder channelBuilder) { - channel = channelBuilder.usePlaintext(true).build(); - blockingStub = CoreServiceGrpc.newBlockingStub(channel); - } - - /** - * Get map of entity ID and {@link EntitySpec} from Core API, given a collection of entityId. - * - * @param entityIds collection of entityId to retrieve. - * @return map of entity ID as key and {@link EntitySpec} value. - * @throws SpecRetrievalException if any error happens during retrieval - */ - public Map getEntitySpecs(Iterable entityIds) { - GetEntitiesRequest request = GetEntitiesRequest.newBuilder().addAllIds(entityIds).build(); - - try { - GetEntitiesResponse response = blockingStub.getEntities(request); - return response - .getEntitiesList() - .stream() - .collect(Collectors.toMap(EntitySpec::getName, Function.identity())); - } catch (StatusRuntimeException e) { - log.error("GRPC error in getEntitySpecs: {}", e.getStatus()); - throw new SpecRetrievalException("Unable to retrieve entity spec", e); - } - } - - /** - * Get all {@link EntitySpec} from Core API. - * - * @return map of entity id as key and {@link EntitySpec} as value. - */ - public Map getAllEntitySpecs() { - try { - ListEntitiesResponse response = blockingStub.listEntities(Empty.getDefaultInstance()); - return response - .getEntitiesList() - .stream() - .collect(Collectors.toMap(EntitySpec::getName, Function.identity())); - } catch (StatusRuntimeException e) { - log.error("GRPC error in getAllEntitySpecs: {}", e.getStatus()); - throw new SpecRetrievalException("Unable to retrieve entity spec", e); - } - } - - /** - * Get map of {@link FeatureSpec} from Core API, given a collection of featureId. - * - * @param featureIds collection of entityId to retrieve. 
- * @return collection of {@link FeatureSpec} - * @throws SpecRetrievalException if any error happens during retrieval - */ - public Map getFeatureSpecs(Iterable featureIds) { - try { - GetFeaturesRequest request = GetFeaturesRequest.newBuilder().addAllIds(featureIds).build(); - GetFeaturesResponse response = blockingStub.getFeatures(request); - return response - .getFeaturesList() - .stream() - .collect(Collectors.toMap(FeatureSpec::getId, Function.identity())); - } catch (StatusRuntimeException e) { - log.error("GRPC error in getFeatureSpecs: {}", e.getStatus()); - throw new SpecRetrievalException("Unable to retrieve feature specs", e); - } - } - - /** - * Get all {@link FeatureSpec} available in Core API. - * - * @return map of feature id as key and {@link FeatureSpec} as value. - */ - public Map getAllFeatureSpecs() { - try { - ListFeaturesResponse response = blockingStub.listFeatures(Empty.getDefaultInstance()); - return response - .getFeaturesList() - .stream() - .collect(Collectors.toMap(FeatureSpec::getId, Function.identity())); - } catch (StatusRuntimeException e) { - log.error("GRPC error in getAllFeatureSpecs, {}", e.getStatus()); - throw new SpecRetrievalException("Unable to retrieve feature specs", e); - } - } - - /** - * Check whether connection to core service is ready. - * - * @return return true if it is ready. Otherwise, return false. - */ - public boolean isConnected() { - ConnectivityState state = channel.getState(true); - return state == ConnectivityState.IDLE - || state == ConnectivityState.READY; - } - - /** - * Shutdown GRPC channel. - */ - public void shutdown() throws InterruptedException { - log.info("Shutting down CoreService"); - channel.shutdown().awaitTermination(5, TimeUnit.SECONDS); - } -} diff --git a/serving/src/main/java/feast/serving/service/CoreSpecService.java b/serving/src/main/java/feast/serving/service/CoreSpecService.java new file mode 100644 index 00000000000..f37ae619633 --- /dev/null +++ b/serving/src/main/java/feast/serving/service/CoreSpecService.java @@ -0,0 +1,32 @@ +package feast.serving.service; + +import feast.core.CoreServiceGrpc; +import feast.core.CoreServiceProto.ListFeatureSetsRequest; +import feast.core.CoreServiceProto.ListFeatureSetsResponse; +import feast.core.CoreServiceProto.UpdateStoreRequest; +import feast.core.CoreServiceProto.UpdateStoreResponse; +import io.grpc.ManagedChannel; +import io.grpc.ManagedChannelBuilder; +import lombok.extern.slf4j.Slf4j; + +/** + * Client for spec retrieval from core. 
+ */ +@Slf4j +public class CoreSpecService { + private final CoreServiceGrpc.CoreServiceBlockingStub blockingStub; + + public CoreSpecService(String feastCoreHost, int feastCorePort) { + ManagedChannel channel = + ManagedChannelBuilder.forAddress(feastCoreHost, feastCorePort).usePlaintext().build(); + blockingStub = CoreServiceGrpc.newBlockingStub(channel); + } + + public ListFeatureSetsResponse listFeatureSets(ListFeatureSetsRequest listFeatureSetsRequest) { + return blockingStub.listFeatureSets(listFeatureSetsRequest); + } + + public UpdateStoreResponse updateStore(UpdateStoreRequest updateStoreRequest) { + return blockingStub.updateStore(updateStoreRequest); + } +} diff --git a/serving/src/main/java/feast/serving/service/FeastServing.java b/serving/src/main/java/feast/serving/service/FeastServing.java deleted file mode 100644 index 1975cc44a44..00000000000 --- a/serving/src/main/java/feast/serving/service/FeastServing.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.service; - -import com.google.common.collect.Sets; -import feast.serving.ServingAPIProto.Entity; -import feast.serving.ServingAPIProto.QueryFeaturesRequest; -import feast.serving.ServingAPIProto.QueryFeaturesResponse; -import feast.specs.FeatureSpecProto.FeatureSpec; -import io.opentracing.Scope; -import io.opentracing.Tracer; -import java.util.Collection; -import java.util.Map; -import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -/** - * Core service for feature retrieval. This class is responsible to retrieve featureSpec from core - * API and coordinate feature retrieval from its associated storage. - */ -@Service -@Slf4j -public class FeastServing { - - public static final String SERVING_STORAGE_ID = "SERVING"; - - private final SpecStorage specStorage; - private final Tracer tracer; - private final FeatureRetrievalDispatcher featureRetrievalDispatcher; - - @Autowired - public FeastServing( - FeatureRetrievalDispatcher featureRetrievalDispatcher, - SpecStorage specStorage, - Tracer tracer) { - this.specStorage = specStorage; - this.featureRetrievalDispatcher = featureRetrievalDispatcher; - this.tracer = tracer; - } - - /** - * Query feature from feast storage. - * - * @param request feature query request. - * @return response of the query containing the feature values.
- */ - public QueryFeaturesResponse queryFeatures(QueryFeaturesRequest request) { - try (Scope scope = tracer.buildSpan("FeastServing-queryFeatures").startActive(true)) { - Collection featureSpecs = getFeatureSpecs(request.getFeatureIdList()); - - scope.span().log("start retrieving all feature"); - Map result = - featureRetrievalDispatcher.dispatchFeatureRetrieval( - request.getEntityName(), - request.getEntityIdList(), - featureSpecs); - - scope.span().log("finished retrieving all feature"); - - // build response - return QueryFeaturesResponse.newBuilder() - .setEntityName(request.getEntityName()) - .putAllEntities(result) - .build(); - } - } - - /** - * Attach request details with associated feature spec. - * - * @param featureIds collection of feature ID - * @return collection of feature spec - */ - private Collection getFeatureSpecs(Collection featureIds) { - // dedup feature ID. - Collection featureIdSet = Sets.newHashSet(featureIds); - - Map featureSpecMap = specStorage.getFeatureSpecs(featureIdSet); - return featureSpecMap.values(); - } -} diff --git a/serving/src/main/java/feast/serving/service/FeatureRetrievalDispatcher.java b/serving/src/main/java/feast/serving/service/FeatureRetrievalDispatcher.java deleted file mode 100644 index e9a9ee67bda..00000000000 --- a/serving/src/main/java/feast/serving/service/FeatureRetrievalDispatcher.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.service; - -import com.google.common.collect.Lists; -import feast.serving.ServingAPIProto.Entity; -import feast.serving.model.FeatureValue; -import feast.serving.util.EntityMapBuilder; -import feast.specs.FeatureSpecProto.FeatureSpec; -import io.opentracing.Scope; -import io.opentracing.Tracer; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -@Slf4j -@Service -public class FeatureRetrievalDispatcher { - - private final FeatureStorageRegistry featureStorageRegistry; - private final Tracer tracer; - - @Autowired - public FeatureRetrievalDispatcher( - FeatureStorageRegistry featureStorageRegistry, - Tracer tracer) { - this.featureStorageRegistry = featureStorageRegistry; - this.tracer = tracer; - } - - /** - * Dispatch feature retrieval. - * - *

If request is small enough (only one request type and one source storage) it will be - * executed in the current thread. Otherwise, the execution takes place in separate thread. - * - * @param entityName entity name of the feature. - * @param entityIds list of entity ids. - * @param featureSpecs list of request. - * @return map of entityID and Entity instance. - */ - public Map dispatchFeatureRetrieval( - String entityName, Collection entityIds, Collection featureSpecs) { - - return runInCurrentThread(entityName, entityIds, Lists.newArrayList(featureSpecs)); - } - - /** - * Execute request in current thread. - * - * @param entityName entity name of of the feature. - * @param entityIds list of entity ID of the feature to be retrieved. - * @param featureSpecs list of feature specs - * @return entity map containing the result of feature retrieval. - */ - private Map runInCurrentThread( - String entityName, - Collection entityIds, - List featureSpecs) { - try (Scope scope = - tracer.buildSpan("FeatureRetrievalDispatcher-runInCurrentThread").startActive(true)) { - - String storageId = FeastServing.SERVING_STORAGE_ID; - FeatureStorage featureStorage = featureStorageRegistry.get(storageId); - - List featureValues; - featureValues = featureStorage.getFeature(entityName, entityIds, featureSpecs); - - EntityMapBuilder builder = new EntityMapBuilder(); - builder.addFeatureValueList(featureValues); - return builder.toEntityMap(); - } - } -} diff --git a/serving/src/main/java/feast/serving/service/FeatureStorage.java b/serving/src/main/java/feast/serving/service/FeatureStorage.java deleted file mode 100644 index e1ffb0306ec..00000000000 --- a/serving/src/main/java/feast/serving/service/FeatureStorage.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.service; - -import feast.serving.exception.FeatureRetrievalException; -import feast.serving.model.FeatureValue; -import feast.specs.FeatureSpecProto.FeatureSpec; -import java.util.Collection; -import java.util.List; - -/** Abstraction of Feast Storage. */ -public interface FeatureStorage { - /** - * Get current value of several feature for the specified entities. - * - * @param entityName entity name, e.g. 'driver', 'customer', 'area' - * @param entityIds list of entity id. - * @param featureSpecs list of feature spec for which the feature should be retrieved. * @param - * @return list of feature value. - * @throws FeatureRetrievalException if anything goes wrong during feature retrieval. 
- */ - List getFeature( - String entityName, - Collection entityIds, - Collection featureSpecs); -} diff --git a/serving/src/main/java/feast/serving/service/FeatureStorageRegistry.java b/serving/src/main/java/feast/serving/service/FeatureStorageRegistry.java deleted file mode 100644 index d9c47ba6665..00000000000 --- a/serving/src/main/java/feast/serving/service/FeatureStorageRegistry.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.service; - -import com.google.common.annotations.VisibleForTesting; -import feast.serving.config.AppConfig; -import feast.specs.StorageSpecProto.StorageSpec; -import io.opentracing.Tracer; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import lombok.extern.slf4j.Slf4j; -import redis.clients.jedis.JedisPool; -import redis.clients.jedis.JedisPoolConfig; - -/** - * Service providing a mapping of storage ID and its {@link FeatureStorage} - */ -@Slf4j -public class FeatureStorageRegistry { - - private final Map featureStorageMap = new ConcurrentHashMap<>(); - - private final AppConfig appConfig; - private final Tracer tracer; - - public FeatureStorageRegistry(AppConfig appConfig, Tracer tracer) { - this.appConfig = appConfig; - this.tracer = tracer; - } - - /** - * Get the feature storage associated with the given storage ID. - * - * @param storageId e.g. "REDIS1", "BIGTABLE2". - * @return instance of the feature storage if exist. Otherwise return null. - */ - public FeatureStorage get(String storageId) { - return featureStorageMap.get(storageId); - } - - /** - * Connect to a feature storage defined by {@code storageSpec}. Currently supports Redis and - * BigTable. - * - * @param storageSpec storage spec definition of the feature storage. - * @return instance of the feature storage. - * @throws UnsupportedOperationException if the storage type is not supported. 
- */ - public FeatureStorage connect(StorageSpec storageSpec) { - Map options = storageSpec.getOptionsMap(); - FeatureStorage fs; - - if (storageSpec.getType().equals(BigTableFeatureStorage.TYPE)) { - - fs = new BigTableFeatureStorage(storageSpec); - featureStorageMap.put(storageSpec.getId(), fs); - } else if (storageSpec.getType().equals(RedisFeatureStorage.TYPE)) { - JedisPoolConfig poolConfig = new JedisPoolConfig(); - poolConfig.setMaxTotal(appConfig.getRedisMaxPoolSize()); - poolConfig.setMaxIdle(appConfig.getRedisMaxIdleSize()); - JedisPool jedisPool = - new JedisPool( - poolConfig, - options.get(RedisFeatureStorage.OPT_REDIS_HOST), - Integer.valueOf(options.get(RedisFeatureStorage.OPT_REDIS_PORT))); - fs = new RedisFeatureStorage(jedisPool, tracer); - featureStorageMap.put(storageSpec.getId(), fs); - } else { - log.warn("Unknown storage: {}" + storageSpec); - return null; - } - - return fs; - } - - @VisibleForTesting - public void put(String storageId, FeatureStorage featureStorage) { - featureStorageMap.put(storageId, featureStorage); - } -} diff --git a/serving/src/main/java/feast/serving/service/JobService.java b/serving/src/main/java/feast/serving/service/JobService.java new file mode 100644 index 00000000000..2029afb1535 --- /dev/null +++ b/serving/src/main/java/feast/serving/service/JobService.java @@ -0,0 +1,24 @@ +package feast.serving.service; + +import feast.serving.ServingAPIProto.Job; +import java.util.Optional; + +// JobService interface specifies the operations to manage Job instances internally in Feast + +public interface JobService { + + /** + * Get Job by job id. + * + * @param id job id + * @return feast.serving.ServingAPIProto.Job + */ + Optional get(String id); + + /** + * Update or create a job (if not exists) + * + * @param job feast.serving.ServingAPIProto.Job + */ + void upsert(Job job); +} diff --git a/serving/src/main/java/feast/serving/service/NoopJobService.java b/serving/src/main/java/feast/serving/service/NoopJobService.java new file mode 100644 index 00000000000..8cecb54f094 --- /dev/null +++ b/serving/src/main/java/feast/serving/service/NoopJobService.java @@ -0,0 +1,18 @@ +package feast.serving.service; + +import feast.serving.ServingAPIProto.Job; +import java.util.Optional; + +// No-op implementation of the JobService, for online serving stores. +public class NoopJobService implements JobService { + + @Override + public Optional get(String id) { + return Optional.empty(); + } + + @Override + public void upsert(Job job) { + + } +} diff --git a/serving/src/main/java/feast/serving/service/RedisBackedJobService.java b/serving/src/main/java/feast/serving/service/RedisBackedJobService.java new file mode 100644 index 00000000000..d50541f4faa --- /dev/null +++ b/serving/src/main/java/feast/serving/service/RedisBackedJobService.java @@ -0,0 +1,54 @@ +package feast.serving.service; + +import com.google.protobuf.InvalidProtocolBufferException; +import com.google.protobuf.util.JsonFormat; +import feast.serving.ServingAPIProto.Job; +import feast.serving.ServingAPIProto.Job.Builder; +import java.util.Optional; +import lombok.extern.slf4j.Slf4j; +import org.joda.time.Duration; +import redis.clients.jedis.Jedis; + +// TODO: Do rate limiting; currently, if clients call get() or upsert() +// at an exceedingly high rate (e.g. they wrap a job reload in a while loop with almost no wait), +// the Redis connection may break and Feast Serving may need to be restarted. Need to handle this.
+ +@Slf4j +public class RedisBackedJobService implements JobService { + private final Jedis jedis; + // Remove job state info after "defaultExpirySeconds" to prevent filling up Redis memory + // and since users normally don't require info about relatively old jobs. + private final int defaultExpirySeconds = (int) Duration.standardDays(1).getStandardSeconds(); + + public RedisBackedJobService(Jedis jedis) { + this.jedis = jedis; + } + + @Override + public Optional get(String id) { + String json = jedis.get(id); + if (json == null) { + return Optional.empty(); + } + Job job = null; + Builder builder = Job.newBuilder(); + try { + JsonFormat.parser().merge(json, builder); + job = builder.build(); + } catch (Exception e) { + log.error(String.format("Failed to parse JSON for Feast job: %s", e.getMessage())); + } + + return Optional.ofNullable(job); + } + + @Override + public void upsert(Job job) { + try { + jedis.set(job.getId(), JsonFormat.printer().omittingInsignificantWhitespace().print(job)); + jedis.expire(job.getId(), defaultExpirySeconds); + } catch (Exception e) { + log.error(String.format("Failed to upsert job: %s", e.getMessage())); + } + } +} diff --git a/serving/src/main/java/feast/serving/service/RedisFeatureStorage.java b/serving/src/main/java/feast/serving/service/RedisFeatureStorage.java deleted file mode 100644 index d481c9554e9..00000000000 --- a/serving/src/main/java/feast/serving/service/RedisFeatureStorage.java +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.service; - -import com.google.protobuf.InvalidProtocolBufferException; -import feast.serving.exception.FeatureRetrievalException; -import feast.serving.model.FeatureValue; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.storage.RedisProto.RedisBucketKey; -import feast.storage.RedisProto.RedisBucketValue; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.Tracer; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.stream.Collectors; -import lombok.AllArgsConstructor; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.codec.digest.DigestUtils; -import redis.clients.jedis.Jedis; -import redis.clients.jedis.JedisPool; - -/** Class for retrieving features from a Redis instance. */ -@Slf4j -public class RedisFeatureStorage implements FeatureStorage { - - public static final String TYPE = "redis"; - // BUCKET_ID_ZERO is used to identify feature with granularity NONE or latest value of a - // time-series feature. 
- private static final long BUCKET_ID_ZERO = 0; - private static final byte[][] ZERO_LENGTH_ARRAY = new byte[0][0]; - public static String OPT_REDIS_HOST = "host"; - public static String OPT_REDIS_PORT = "port"; - public static String OPT_REDIS_BUCKET_SIZE = "bucketSize"; // ISO8601 Period - private final JedisPool jedisPool; - private final Tracer tracer; - - /** - * Create a RedisFeatureStorage. - * - * @param jedisPool pool of Jedis instance configured to connect to certain Redis instance. - */ - public RedisFeatureStorage(JedisPool jedisPool, Tracer tracer) { - this.jedisPool = jedisPool; - this.tracer = tracer; - } - - /** {@inheritDoc} */ - @Override - public List getFeature( - String entityName, Collection entityIds, Collection featureSpecs) { - try (Scope scope = tracer.buildSpan("Redis-getFeature").startActive(true)) { - List getRequests = new ArrayList<>(entityIds.size() * featureSpecs.size()); - for (FeatureSpec featureSpec : featureSpecs) { - String featureId = featureSpec.getId(); - String featureIdSha1Prefix = makeFeatureIdSha1Prefix(featureId); - for (String entityId : entityIds) { - RedisBucketKey key = makeBucketKey(entityId, featureIdSha1Prefix, BUCKET_ID_ZERO); - getRequests.add(new GetRequest(entityId, featureId, key)); - } - } - scope.span().log("completed request creation"); - return sendAndProcessMultiGet(getRequests); - } - } - - /** - * Send a list of get request as an mget and process its result. - * - * @param getRequests list of get request. - * @return list of feature value. - */ - private List sendAndProcessMultiGet(List getRequests) { - try (Scope scope = tracer.buildSpan("Redis-sendAndProcessMultiGet").startActive(true)) { - Span span = scope.span(); - - if (getRequests.isEmpty()) { - return Collections.emptyList(); - } - - span.log("creating mget request"); - byte[][] binaryKeys = - getRequests - .stream() - .map(r -> r.getKey().toByteArray()) - .collect(Collectors.toList()) - .toArray(ZERO_LENGTH_ARRAY); - span.log("completed creating mget request"); - - List binaryValues; - try (Jedis jedis = jedisPool.getResource()) { - span.log("sending mget"); - binaryValues = jedis.mget(binaryKeys); - span.log("completed mget"); - } catch (Exception e) { - log.error("Exception while retrieving feature from Redis", e); - throw new FeatureRetrievalException("Unable to retrieve feature from Redis", e); - } - - try { - return processMGet(getRequests, binaryValues); - } catch (InvalidProtocolBufferException e) { - log.error("Unable to parse protobuf", e); - throw new FeatureRetrievalException("Unable to parse protobuf while retrieving feature", e); - } - } - } - - /** - * Process mget results given a list of get requests. - * - * @param requests list of get request. - * @param results list of get result. - * @return list of feature value. - * @throws InvalidProtocolBufferException if protobuf parsing fail. 
- */ - private List processMGet(List requests, List results) - throws InvalidProtocolBufferException { - try (Scope scope = tracer.buildSpan("Redis-processMGet").startActive(true)) { - - int keySize = requests.size(); - List featureValues = new ArrayList<>(keySize); - for (int i = 0; i < keySize; i++) { - GetRequest request = requests.get(i); - byte[] binaryValue = results.get(i); - if (binaryValue == null) { - continue; - } - RedisBucketValue value = RedisBucketValue.parseFrom(binaryValue); - FeatureValue featureValue = - new FeatureValue( - request.getFeatureId(), - request.getEntityId(), - value.getValue(), - value.getEventTimestamp()); - featureValues.add(featureValue); - } - return featureValues; - } - } - - /** - * Create {@link RedisBucketKey}. - * - * @param entityId entityID of the feature - * @param featureIdSha1Prefix first 7-bytes of featureId SHA1 prefix - * @param bucketId bucket ID. - * @return instance of RedisBucketKey - */ - private RedisBucketKey makeBucketKey(String entityId, String featureIdSha1Prefix, long bucketId) { - return RedisBucketKey.newBuilder() - .setEntityKey(entityId) - .setFeatureIdSha1Prefix(featureIdSha1Prefix) - .setBucketId(bucketId) - .build(); - } - - /** - * Calculate feature id's sha1 prefix. - * - * @param featureId feature ID - * @return first 7 characters of SHA1(featureID) - */ - private String makeFeatureIdSha1Prefix(String featureId) { - return DigestUtils.sha1Hex(featureId.getBytes()).substring(0, 7); - } - - @AllArgsConstructor - @Getter - private static class GetRequest { - - /** Entity Id of the get request */ - private String entityId; - - /** Feature Id of the get request */ - private String featureId; - - /** Bucket key of the request */ - private RedisBucketKey key; - } -} diff --git a/serving/src/main/java/feast/serving/service/RedisServingService.java b/serving/src/main/java/feast/serving/service/RedisServingService.java new file mode 100644 index 00000000000..69feec724fb --- /dev/null +++ b/serving/src/main/java/feast/serving/service/RedisServingService.java @@ -0,0 +1,255 @@ +/* + * Copyright 2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package feast.serving.service; + +import com.google.common.collect.Maps; +import com.google.protobuf.AbstractMessageLite; +import com.google.protobuf.Duration; +import com.google.protobuf.InvalidProtocolBufferException; +import feast.core.FeatureSetProto.EntitySpec; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.serving.ServingAPIProto.FeastServingType; +import feast.serving.ServingAPIProto.GetBatchFeaturesRequest; +import feast.serving.ServingAPIProto.GetBatchFeaturesResponse; +import feast.serving.ServingAPIProto.GetFeastServingInfoRequest; +import feast.serving.ServingAPIProto.GetFeastServingInfoResponse; +import feast.serving.ServingAPIProto.GetJobRequest; +import feast.serving.ServingAPIProto.GetJobResponse; +import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; +import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; +import feast.serving.ServingAPIProto.FeatureSetRequest; +import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; +import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse.FieldValues; +import feast.storage.RedisProto.RedisKey; +import feast.types.FeatureRowProto.FeatureRow; +import feast.types.FieldProto.Field; +import feast.types.ValueProto.Value; +import io.grpc.Status; +import io.opentracing.Scope; +import io.opentracing.Tracer; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; +import redis.clients.jedis.Jedis; +import redis.clients.jedis.JedisPool; + +@Slf4j +public class RedisServingService implements ServingService { + + private final JedisPool jedisPool; + private final CachedSpecService specService; + private final Tracer tracer; + + public RedisServingService(JedisPool jedisPool, CachedSpecService specService, Tracer tracer) { + this.jedisPool = jedisPool; + this.specService = specService; + this.tracer = tracer; + } + + /** {@inheritDoc} */ + @Override + public GetFeastServingInfoResponse getFeastServingInfo( + GetFeastServingInfoRequest getFeastServingInfoRequest) { + return GetFeastServingInfoResponse.newBuilder() + .setType(FeastServingType.FEAST_SERVING_TYPE_ONLINE) + .build(); + } + + /** {@inheritDoc} */ + @Override + public GetOnlineFeaturesResponse getOnlineFeatures(GetOnlineFeaturesRequest request) { + try (Scope scope = tracer.buildSpan("Redis-getOnlineFeatures").startActive(true)) { + GetOnlineFeaturesResponse.Builder getOnlineFeaturesResponseBuilder = + GetOnlineFeaturesResponse.newBuilder(); + + List entityRows = request.getEntityRowsList(); + Map> featureValuesMap = + entityRows.stream() + .collect(Collectors.toMap(er -> er, er -> Maps.newHashMap(er.getFieldsMap()))); + + List featureSetRequests = request.getFeatureSetsList(); + for (FeatureSetRequest featureSetRequest : featureSetRequests) { + + FeatureSetSpec featureSetSpec = + specService.getFeatureSet(featureSetRequest.getName(), featureSetRequest.getVersion()); + + List featureSetEntityNames = + featureSetSpec.getEntitiesList().stream() + .map(EntitySpec::getName) + .collect(Collectors.toList()); + + Duration defaultMaxAge = featureSetSpec.getMaxAge(); + if (featureSetRequest.getMaxAge() == Duration.getDefaultInstance()) { + featureSetRequest = featureSetRequest.toBuilder().setMaxAge(defaultMaxAge).build(); + } + + List redisKeys = + getRedisKeys(featureSetEntityNames, entityRows, featureSetRequest); + + try { + sendAndProcessMultiGet(redisKeys, entityRows, featureValuesMap, featureSetRequest); + } catch (InvalidProtocolBufferException e) { + throw 
Status.INTERNAL + .withDescription("Unable to parse protobuf while retrieving feature") + .withCause(e) + .asRuntimeException(); + } + } + List fieldValues = + featureValuesMap.values().stream() + .map(m -> FieldValues.newBuilder().putAllFields(m).build()) + .collect(Collectors.toList()); + return getOnlineFeaturesResponseBuilder.addAllFieldValues(fieldValues).build(); + } + } + + @Override + public GetBatchFeaturesResponse getBatchFeatures(GetBatchFeaturesRequest getFeaturesRequest) { + throw Status.UNIMPLEMENTED.withDescription("Method not implemented").asRuntimeException(); + } + + @Override + public GetJobResponse getJob(GetJobRequest getJobRequest) { + throw Status.UNIMPLEMENTED.withDescription("Method not implemented").asRuntimeException(); + } + + /** + * Build the redis keys for retrieval from the store. + * + * @param featureSetEntityNames entity names that actually belong to the featureSet + * @param entityRows entity values to retrieve for + * @param featureSetRequest details of the requested featureSet + * @return list of RedisKeys + */ + private List getRedisKeys( + List featureSetEntityNames, + List entityRows, + FeatureSetRequest featureSetRequest) { + try (Scope scope = tracer.buildSpan("Redis-makeRedisKeys").startActive(true)) { + String featureSetId = + String.format("%s:%s", featureSetRequest.getName(), featureSetRequest.getVersion()); + List redisKeys = + entityRows.stream() + .map(row -> makeRedisKey(featureSetId, featureSetEntityNames, row)) + .collect(Collectors.toList()); + return redisKeys; + } + } + + /** + * Create {@link RedisKey} + * + * @param featureSet featureSet reference of the feature. E.g. feature_set_1:1 + * @param featureSetEntityNames entity names that belong to the featureSet + * @param entityRow entityRow to build the key from + * @return {@link RedisKey} + */ + private RedisKey makeRedisKey( + String featureSet, List featureSetEntityNames, EntityRow entityRow) { + RedisKey.Builder builder = RedisKey.newBuilder().setFeatureSet(featureSet); + Map fieldsMap = entityRow.getFieldsMap(); + for (int i = 0; i < featureSetEntityNames.size(); i++) { + String entityName = featureSetEntityNames.get(i); + + if (!fieldsMap.containsKey(entityName)){ + throw Status.INVALID_ARGUMENT + .withDescription(String.format("Entity row fields \"%s\" does not contain required entity field \"%s\"", fieldsMap.keySet().toString(), entityName)) + .asRuntimeException(); + } + + builder.addEntities( + Field.newBuilder().setName(entityName).setValue(fieldsMap.get(entityName))); + } + return builder.build(); + } + + private void sendAndProcessMultiGet( + List redisKeys, + List entityRows, + Map> featureValuesMap, + FeatureSetRequest featureSetRequest) + throws InvalidProtocolBufferException { + + List jedisResps = sendMultiGet(redisKeys); + + try (Scope scope = tracer.buildSpan("Redis-processResponse").startActive(true)) { + String featureSetId = + String.format("%s:%d", featureSetRequest.getName(), featureSetRequest.getVersion()); + Map nullValues = + featureSetRequest.getFeatureNamesList().stream() + .collect( + Collectors.toMap( + name -> featureSetId + ":" + name, name -> Value.newBuilder().build())); + for (int i = 0; i < jedisResps.size(); i++) { + EntityRow entityRow = entityRows.get(i); + Map featureValues = featureValuesMap.get(entityRow); + byte[] jedisResponse = jedisResps.get(i); + if (jedisResponse == null) { + featureValues.putAll(nullValues); + continue; + } + FeatureRow featureRow = FeatureRow.parseFrom(jedisResponse); + boolean stale = isStale(featureSetRequest, 
entityRow, featureRow); + if (stale) { + featureValues.putAll(nullValues); + continue; + } + featureRow.getFieldsList().stream() + .filter(f -> featureSetRequest.getFeatureNamesList().contains(f.getName())) + .forEach(f -> featureValues.put(featureSetId + ":" + f.getName(), f.getValue())); + } + } + } + + private boolean isStale( + FeatureSetRequest featureSetRequest, EntityRow entityRow, FeatureRow featureRow) { + if (featureSetRequest.getMaxAge() == Duration.getDefaultInstance()) { + return false; + } + long givenTimestamp = entityRow.getEntityTimestamp().getSeconds(); + if (givenTimestamp == 0) { + givenTimestamp = System.currentTimeMillis() / 1000; + } + long timeDifference = givenTimestamp - featureRow.getEventTimestamp().getSeconds(); + return timeDifference > featureSetRequest.getMaxAge().getSeconds(); + } + + /** + * Send a list of get request as an mget + * + * @param keys list of {@link RedisKey} + * @return list of {@link FeatureRow} in primitive byte representation for each {@link RedisKey} + */ + private List sendMultiGet(List keys) { + try (Scope scope = tracer.buildSpan("Redis-sendMultiGet").startActive(true)) { + try (Jedis jedis = jedisPool.getResource()) { + byte[][] binaryKeys = + keys.stream() + .map(AbstractMessageLite::toByteArray) + .collect(Collectors.toList()) + .toArray(new byte[0][0]); + return jedis.mget(binaryKeys); + } catch (Exception e) { + throw Status.NOT_FOUND + .withDescription("Unable to retrieve feature from Redis") + .withCause(e) + .asRuntimeException(); + } + } + } +} diff --git a/serving/src/main/java/feast/serving/service/ServingService.java b/serving/src/main/java/feast/serving/service/ServingService.java new file mode 100644 index 00000000000..3c8c075c158 --- /dev/null +++ b/serving/src/main/java/feast/serving/service/ServingService.java @@ -0,0 +1,21 @@ +package feast.serving.service; + +import feast.serving.ServingAPIProto.GetBatchFeaturesRequest; +import feast.serving.ServingAPIProto.GetBatchFeaturesResponse; +import feast.serving.ServingAPIProto.GetFeastServingInfoRequest; +import feast.serving.ServingAPIProto.GetFeastServingInfoResponse; +import feast.serving.ServingAPIProto.GetJobRequest; +import feast.serving.ServingAPIProto.GetJobResponse; +import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; +import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; + +public interface ServingService { + GetFeastServingInfoResponse getFeastServingInfo( + GetFeastServingInfoRequest getFeastServingInfoRequest); + + GetOnlineFeaturesResponse getOnlineFeatures(GetOnlineFeaturesRequest getFeaturesRequest); + + GetBatchFeaturesResponse getBatchFeatures(GetBatchFeaturesRequest getFeaturesRequest); + + GetJobResponse getJob(GetJobRequest getJobRequest); +} diff --git a/serving/src/main/java/feast/serving/service/SpecStorage.java b/serving/src/main/java/feast/serving/service/SpecStorage.java deleted file mode 100644 index b304c34babf..00000000000 --- a/serving/src/main/java/feast/serving/service/SpecStorage.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.service; - -import feast.serving.exception.SpecRetrievalException; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.StorageSpecProto.StorageSpec; -import java.util.Map; - -/** - * Abstraction of service which provides {@link EntitySpec}, {@link FeatureSpec}, and {@link - * StorageSpec}. - */ -public interface SpecStorage { - - /** - * Get a map of {@link EntitySpec} from Core API, given a collection of entityId. - * - * @param entityIds collection of entityId to retrieve. - * @return map of {@link EntitySpec}, where the key is entity name. - * @throws SpecRetrievalException if any error happens during retrieval - */ - Map getEntitySpecs(Iterable entityIds); - - /** - * Get all {@link EntitySpec} from Core API. - * - * @return map of {@link EntitySpec}, where the key is the entity name. - */ - Map getAllEntitySpecs(); - - /** - * Get a map of {@link FeatureSpec} from Core API, given a collection of featureId. - * - * @param featureIds collection of entityId to retrieve. - * @return map of {@link FeatureSpec}, where the key is feature id. - * @throws SpecRetrievalException if any error happens during retrieval - */ - Map getFeatureSpecs(Iterable featureIds); - - /** - * Check whether connection to spec storage is ready. - * - * @return return true if it is ready. Otherwise, return false. - */ - boolean isConnected(); -} diff --git a/serving/src/main/java/feast/serving/util/BigQueryUtil.java b/serving/src/main/java/feast/serving/util/BigQueryUtil.java new file mode 100644 index 00000000000..390bea19b56 --- /dev/null +++ b/serving/src/main/java/feast/serving/util/BigQueryUtil.java @@ -0,0 +1,104 @@ +package feast.serving.util; + +import com.google.protobuf.Duration; +import com.mitchellbosecke.pebble.PebbleEngine; +import com.mitchellbosecke.pebble.template.PebbleTemplate; +import feast.core.FeatureSetProto.EntitySpec; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.serving.ServingAPIProto.FeatureSetRequest; +import java.io.IOException; +import java.io.StringWriter; +import java.io.Writer; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import lombok.Value; + +public class BigQueryUtil { + + private static final PebbleEngine engine = new PebbleEngine.Builder().build(); + private static final String FEATURESET_TEMPLATE_NAME = "templates/bq_featureset_query.sql"; + + @Value + public static class FeatureSetInfo { + String id; + String name; + int version; + long maxAge; + List entities; + List features; + } + + public static String getTimestampLimitQuery(String projectId, String datasetId, + String leftTableName) { + return String.format( + "SELECT DATETIME(MAX(event_timestamp)) as max, DATETIME(MIN(event_timestamp)) as min FROM `%s.%s.%s`", + projectId, datasetId, leftTableName); + } + + public static String createQuery( + List featureSets, + List featureSetSpecs, + List entities, + String projectId, + String bigqueryDataset, + String leftTableName, + String minTimestamp, + String maxTimestamp) throws IOException { + + if (featureSets == null + || featureSetSpecs == null + || bigqueryDataset.isEmpty()) { + return ""; + } + + if (featureSets.size() != featureSetSpecs.size()) { + return ""; + } + + List featureSetInfos = new ArrayList<>(); + for (int i = 0; i < featureSets.size(); 
i++) { + FeatureSetSpec spec = featureSetSpecs.get(i); + FeatureSetRequest request = featureSets.get(i); + Duration maxAge = getMaxAge(request, spec); + List fsEntities = spec.getEntitiesList().stream().map(EntitySpec::getName) + .collect(Collectors.toList()); + String id = String.format("%s:%s", spec.getName(), spec.getVersion()); + featureSetInfos.add(new FeatureSetInfo(id, spec.getName(), spec.getVersion(), maxAge.getSeconds(), fsEntities, request.getFeatureNamesList())); + } + return createQueryForFeatureSets(featureSetInfos, entities, projectId, bigqueryDataset, leftTableName, minTimestamp, maxTimestamp); + } + + public static String createQueryForFeatureSets( + List featureSetInfos, + List entities, + String projectId, + String datasetId, + String leftTableName, + String minTimestamp, + String maxTimestamp) throws IOException { + + PebbleTemplate template = engine.getTemplate(FEATURESET_TEMPLATE_NAME); + Map context = new HashMap<>(); + context.put("featureSets", featureSetInfos); + context.put("fullEntitiesList", entities); + context.put("projectId", projectId); + context.put("datasetId", datasetId); + context.put("minTimestamp", minTimestamp); + context.put("maxTimestamp", maxTimestamp); + context.put("leftTableName", leftTableName); + + Writer writer = new StringWriter(); + template.evaluate(writer, context); + return writer.toString(); + } + + private static Duration getMaxAge(FeatureSetRequest featureSet, FeatureSetSpec featureSetSpec) { + if (featureSet.getMaxAge() == Duration.getDefaultInstance()) { + return featureSetSpec.getMaxAge(); + } + return featureSet.getMaxAge(); + } +} diff --git a/serving/src/main/java/feast/serving/util/EntityMapBuilder.java b/serving/src/main/java/feast/serving/util/EntityMapBuilder.java deleted file mode 100644 index 5dc6a52e86b..00000000000 --- a/serving/src/main/java/feast/serving/util/EntityMapBuilder.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.util; - -import feast.serving.ServingAPIProto; -import feast.serving.ServingAPIProto.Entity; -import feast.serving.model.FeatureValue; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - -/** Utility class for converting response from Feature Storage into {@link Map}. */ -public class EntityMapBuilder { - - private final Map> backingMap = new ConcurrentHashMap<>(); - - public void addFeatureValueList(List features) { - for (FeatureValue feature : features) { - String entityId = feature.getEntityId(); - String featureId = feature.getFeatureId(); - - Map featureIdToFeatureValueMap = - backingMap.computeIfAbsent(entityId, k -> new ConcurrentHashMap<>()); - - featureIdToFeatureValueMap.put(featureId, feature); - } - } - - /** - * Build an entity map out of {@code this}. 
- * - * @return Entity Map - */ - public Map toEntityMap() { - Map resultMap = new HashMap<>(); - for (Map.Entry> entity : backingMap.entrySet()) { - String entityId = entity.getKey(); - Entity.Builder entityBuilder = Entity.newBuilder(); - for (Map.Entry feature : entity.getValue().entrySet()) { - String featureId = feature.getKey(); - FeatureValue featureValue = feature.getValue(); - ServingAPIProto.FeatureValue featureValueProto = ServingAPIProto.FeatureValue.newBuilder() - .setTimestamp(featureValue.getTimestamp()) - .setValue(featureValue.getValue()) - .build(); - - entityBuilder.putFeatures(featureId, featureValueProto); - } - resultMap.put(entityId, entityBuilder.build()); - } - return resultMap; - } -} diff --git a/serving/src/main/java/feast/serving/util/RequestHelper.java b/serving/src/main/java/feast/serving/util/RequestHelper.java index c5e915dc27e..445e781eda8 100644 --- a/serving/src/main/java/feast/serving/util/RequestHelper.java +++ b/serving/src/main/java/feast/serving/util/RequestHelper.java @@ -1,51 +1,30 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - package feast.serving.util; -import com.google.common.base.Strings; -import feast.serving.ServingAPIProto.QueryFeaturesRequest; +import feast.serving.ServingAPIProto.DataFormat; +import feast.serving.ServingAPIProto.GetBatchFeaturesRequest; +import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; +import io.grpc.Status; public class RequestHelper { - private RequestHelper() {} - - public static void validateRequest(QueryFeaturesRequest request) { - // entity name shall present - if (Strings.isNullOrEmpty(request.getEntityName())) { - throw new IllegalArgumentException("entity name must be set"); - } - // entity id list shall not be empty - if (request.getEntityIdList().size() <= 0) { - throw new IllegalArgumentException("entity ID must be provided"); + public static void validateOnlineRequest(GetOnlineFeaturesRequest request) { + // EntityDataSetRow shall not be empty + if (request.getEntityRowsCount() <= 0) { + throw Status.INVALID_ARGUMENT.withDescription("Entity value must be provided") + .asRuntimeException(); } + } - // feature IDs shall not be empty - if (request.getFeatureIdCount() <= 0) { - throw new IllegalArgumentException("feature id must be provided"); + public static void validateBatchRequest(GetBatchFeaturesRequest getFeaturesRequest) { + if (!getFeaturesRequest.hasDatasetSource()) { + throw Status.INVALID_ARGUMENT.withDescription("Dataset source must be provided") + .asRuntimeException(); } - // feature id in each request detail shall have same entity name - String entityName = request.getEntityName(); - for (String featureId : request.getFeatureIdList()) { - if (!featureId.substring(0, featureId.indexOf(".")).equals(entityName)) { - throw new IllegalArgumentException( - "entity name of all feature ID in request details must be: " + entityName); - } + if (!getFeaturesRequest.getDatasetSource().hasFileSource()) { + throw 
Status.INVALID_ARGUMENT + .withDescription("Dataset source must be provided: only file source supported") + .asRuntimeException(); } } } diff --git a/serving/src/main/java/feast/serving/util/StatsUtil.java b/serving/src/main/java/feast/serving/util/StatsUtil.java deleted file mode 100644 index 86437218e8d..00000000000 --- a/serving/src/main/java/feast/serving/util/StatsUtil.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ -package feast.serving.util; - -import com.google.common.base.Strings; -import feast.serving.ServingAPIProto.QueryFeaturesRequest; -import io.grpc.Context; -import io.grpc.Context.Key; -import java.net.SocketAddress; -import java.util.ArrayList; -import java.util.List; - -/** - * Utility class for statistics. - */ -public class StatsUtil { - - public static final Key REMOTE_ADDRESS = Context.key("remote-address"); - - private StatsUtil() { - } - - /** - * Create Statsd Tag for a request. - *

The tags contain information about feature's ids of the request and the client requesting - * it. - */ - public static String[] makeStatsdTags(QueryFeaturesRequest request) { - List featureTags = makeFeatureTags(request); - String remoteAddrTag = makeRemoteAddressTag(); - String[] tags = featureTags.toArray(new String[featureTags.size() + 1]); - tags[featureTags.size()] = remoteAddrTag; - return tags; - } - - private static List makeFeatureTags(QueryFeaturesRequest request) { - List tags = new ArrayList<>(request.getFeatureIdCount()); - for (String featureId : request.getFeatureIdList()) { - if (Strings.isNullOrEmpty(featureId)) { - continue; - } - String featureTag = makeFeatureTag(featureId); - tags.add(featureTag); - } - return tags; - } - - private static String makeFeatureTag(String featureId) { - return "feature:" + featureId; - } - - private static String makeRemoteAddressTag() { - SocketAddress socketAddress = REMOTE_ADDRESS.get(); - if (socketAddress == null) { - return "remote:unknown"; - } - return "remote:" + socketAddress.toString(); - } -} diff --git a/serving/src/main/java/feast/serving/util/TimeUtil.java b/serving/src/main/java/feast/serving/util/TimeUtil.java deleted file mode 100644 index 24c4aa8950d..00000000000 --- a/serving/src/main/java/feast/serving/util/TimeUtil.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.util; - -/** Utility class for time-related function. */ -public class TimeUtil { - public static final int NANO_IN_MICRO = 1000; - - private TimeUtil() {} - - - /** - * Returns the current value of the running Java Virtual Machine's high-resolution time source, in - * microseconds. - * - * @return current micro time. 
- * @see System#nanoTime() - */ - public static long microTime() { - return System.nanoTime() / NANO_IN_MICRO; - } -} diff --git a/serving/src/main/java/feast/serving/util/mappers/ResponseJSONMapper.java b/serving/src/main/java/feast/serving/util/mappers/ResponseJSONMapper.java new file mode 100644 index 00000000000..550601a687a --- /dev/null +++ b/serving/src/main/java/feast/serving/util/mappers/ResponseJSONMapper.java @@ -0,0 +1,59 @@ +package feast.serving.util.mappers; + +import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; +import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse.FieldValues; +import feast.types.ValueProto.Value; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +// ResponseJSONMapper maps GRPC Response types to more human readable JSON responses +public class ResponseJSONMapper { + + public static List> mapGetOnlineFeaturesResponse(GetOnlineFeaturesResponse response) { + return response.getFieldValuesList().stream() + .map(fieldValue -> convertFieldValuesToMap(fieldValue)) + .collect(Collectors.toList()); + } + + private static Map convertFieldValuesToMap(FieldValues fieldValues) { + return fieldValues.getFieldsMap().entrySet().stream() + .collect(Collectors.toMap(es -> es.getKey(), es -> extractValue(es.getValue()))); + } + + private static Object extractValue(Value value) { + switch (value.getValCase().getNumber()) { + case 1: + return value.getBytesVal(); + case 2: + return value.getStringVal(); + case 3: + return value.getInt32Val(); + case 4: + return value.getInt64Val(); + case 5: + return value.getDoubleVal(); + case 6: + return value.getFloatVal(); + case 7: + return value.getBoolVal(); + case 11: + return value.getBytesListVal(); + case 12: + return value.getStringListVal(); + case 13: + return value.getInt32ListVal(); + case 14: + return value.getInt64ListVal(); + case 15: + return value.getDoubleListVal(); + case 16: + return value.getFloatListVal(); + case 17: + return value.getBoolListVal(); + default: + return null; + } + } + +} diff --git a/serving/src/main/java/feast/serving/util/mappers/YamlToProtoMapper.java b/serving/src/main/java/feast/serving/util/mappers/YamlToProtoMapper.java new file mode 100644 index 00000000000..9391cab2939 --- /dev/null +++ b/serving/src/main/java/feast/serving/util/mappers/YamlToProtoMapper.java @@ -0,0 +1,21 @@ +package feast.serving.util.mappers; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; +import com.google.protobuf.util.JsonFormat; +import feast.core.StoreProto.Store; +import feast.core.StoreProto.Store.Builder; +import java.io.IOException; + +public class YamlToProtoMapper { + private static final ObjectMapper yamlReader = new ObjectMapper(new YAMLFactory()); + private static final ObjectMapper jsonWriter = new ObjectMapper(); + + public static Store yamlToStoreProto(String yaml) throws IOException { + Object obj = yamlReader.readValue(yaml, Object.class); + String jsonString = jsonWriter.writeValueAsString(obj); + Builder builder = Store.newBuilder(); + JsonFormat.parser().merge(jsonString, builder); + return builder.build(); + } +} diff --git a/serving/src/main/resources/application.properties b/serving/src/main/resources/application.properties deleted file mode 100644 index f7d00236381..00000000000 --- a/serving/src/main/resources/application.properties +++ /dev/null @@ -1,38 +0,0 @@ -# -# Copyright 2018 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# 
you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -server.port=${FEAST_SERVING_HTTP_PORT:8080} -grpc.port=${FEAST_SERVING_GRPC_PORT:6566} -feast.core.host=${FEAST_CORE_HOST:localhost} -feast.core.grpc.port=${FEAST_CORE_GRPC_PORT:6565} - -feast.store.serving.type = ${STORE_SERVING_TYPE:} -feast.store.serving.options = ${STORE_SERVING_OPTIONS:{}} - -feast.threadpool.max=${FEAST_MAX_NB_THREAD:128} -feast.maxentity=${FEAST_MAX_ENTITY_PER_BATCH:2000} -feast.timeout=${FEAST_RETRIEVAL_TIMEOUT:5} -feast.redispool.maxsize=${FEAST_REDIS_POOL_MAX_SIZE:128} -feast.redispool.maxidle=${FEAST_REDIS_POOL_MAX_IDLE:16} -feast.cacheDurationMinute=${FEAST_SPEC_CACHE_DURATION_MINUTE:5} - -statsd.host= ${STATSD_HOST:localhost} -statsd.port= ${STATSD_PORT:8125} - -management.metrics.export.simple.enabled=false -management.metrics.export.statsd.enabled=true -management.metrics.export.statsd.host=${STATSD_HOST:localhost} -management.metrics.export.statsd.port=${STATSD_PORT:8125} \ No newline at end of file diff --git a/serving/src/main/resources/application.yml b/serving/src/main/resources/application.yml new file mode 100644 index 00000000000..6ef68d710cf --- /dev/null +++ b/serving/src/main/resources/application.yml @@ -0,0 +1,60 @@ +feast: + # This value is retrieved from project.version properties in pom.xml + # https://docs.spring.io/spring-boot/docs/current/reference/html/ + version: @project.version@ + # GRPC service address for Feast Core + # Feast Serving requires connection to Feast Core to retrieve and reload Feast metadata (e.g. FeatureSpecs, Store information) + core-host: ${FEAST_CORE_HOST:localhost} + core-grpc-port: ${FEAST_CORE_GRPC_PORT:6565} + + tracing: + # If true, Feast will provide tracing data (using OpenTracing API) for various RPC method calls + # which can be useful to debug performance issues and perform benchmarking + enabled: false + # Only Jaeger tracer is supported currently + # https://opentracing.io/docs/supported-tracers/ + tracer-name: jaeger + # The service name identifier for the tracing data + service-name: feast_serving + + store: + # Path containing the store configuration for this serving store. + config-path: ${FEAST_STORE_CONFIG_PATH:./sample_redis_config.yml} + # If serving redis, the redis pool max size + redis-pool-max-size: ${FEAST_REDIS_POOL_MAX_SIZE:128} + # If serving redis, the redis pool max idle conns + redis-pool-max-idle: ${FEAST_REDIS_POOL_MAX_IDLE:16} + + jobs: + # job-staging-location specifies the URI to store intermediate files for batch serving. + # Feast Serving client is expected to have read access to this staging location + # to download the batch features. + # + # For example: gs://mybucket/myprefix + # Please omit the trailing slash in the URI. + staging-location: ${FEAST_JOB_STAGING_LOCATION:} + # Type of store to store job metadata. This only needs to be set if the + # serving store type is Bigquery. + store-type: ${FEAST_JOB_STORE_TYPE:} + # + # Job store connection options. 
If the job store is redis, the following items are required: + # + # store-options: + # host: localhost + # port: 6379 + store-options: {} + +grpc: + # The port number Feast Serving GRPC service should listen on + # It is set default to 6566 so it does not conflict with the GRPC server on Feast Core + # which defaults to port 6565 + port: ${GRPC_PORT:6566} + # This allows client to discover GRPC endpoints easily + # https://github.com/grpc/grpc-java/blob/master/documentation/server-reflection-tutorial.md + enable-reflection: ${GRPC_ENABLE_REFLECTION:true} + +server: + # The port number on which the Tomcat webserver that serves REST API endpoints should listen + # It is set by default to 8080 so it does not conflict with Tomcat webserver on Feast Core + # if both Feast Core and Serving are running on the same machine + port: ${SERVER_PORT:8081} \ No newline at end of file diff --git a/serving/src/main/resources/banner.txt b/serving/src/main/resources/banner.txt index e20ae42fb77..44aa9c4cad9 100644 --- a/serving/src/main/resources/banner.txt +++ b/serving/src/main/resources/banner.txt @@ -1,26 +1,14 @@ - - -FFFFFFFFFFFFFFFFFFFFFF tttt -F::::::::::::::::::::F ttt:::t -F::::::::::::::::::::F t:::::t -FF::::::FFFFFFFFF::::F t:::::t - F:::::F FFFFFF eeeeeeeeeeee aaaaaaaaaaaaa ssssssssss ttttttt:::::ttttttt - F:::::F ee::::::::::::ee a::::::::::::a ss::::::::::s t:::::::::::::::::t - F::::::FFFFFFFFFF e::::::eeeee:::::ee aaaaaaaaa:::::a ss:::::::::::::s t:::::::::::::::::t - F:::::::::::::::F e::::::e e:::::e a::::a s::::::ssss:::::stttttt:::::::tttttt - F:::::::::::::::F e:::::::eeeee::::::e aaaaaaa:::::a s:::::s ssssss t:::::t - F::::::FFFFFFFFFF e:::::::::::::::::e aa::::::::::::a s::::::s t:::::t - F:::::F e::::::eeeeeeeeeee a::::aaaa::::::a s::::::s t:::::t - F:::::F e:::::::e a::::a a:::::a ssssss s:::::s t:::::t tttttt -FF:::::::FF e::::::::e a::::a a:::::a s:::::ssss::::::s t::::::tttt:::::t -F::::::::FF e::::::::eeeeeeee a:::::aaaa::::::a s::::::::::::::s tt::::::::::::::t -F::::::::FF ee:::::::::::::e a::::::::::aa:::a s:::::::::::ss tt:::::::::::tt -FFFFFFFFFFF eeeeeeeeeeeeee aaaaaaaaaa aaaa sssssssssss ttttttttttt - - - - - - - +███████╗███████╗ █████╗ ███████╗████████╗ +██╔════╝██╔════╝██╔══██╗██╔════╝╚══██╔══╝ +█████╗ █████╗ ███████║███████╗ ██║ +██╔══╝ ██╔══╝ ██╔══██║╚════██║ ██║ +██║ ███████╗██║ ██║███████║ ██║ +╚═╝ ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═╝ + +███████╗███████╗██████╗ ██╗ ██╗██╗███╗ ██╗ ██████╗ +██╔════╝██╔════╝██╔══██╗██║ ██║██║████╗ ██║██╔════╝ +███████╗█████╗ ██████╔╝██║ ██║██║██╔██╗ ██║██║ ███╗ +╚════██║██╔══╝ ██╔══██╗╚██╗ ██╔╝██║██║╚██╗██║██║ ██║ +███████║███████╗██║ ██║ ╚████╔╝ ██║██║ ╚████║╚██████╔╝ +╚══════╝╚══════╝╚═╝ ╚═╝ ╚═══╝ ╚═╝╚═╝ ╚═══╝ ╚═════╝ diff --git a/serving/src/main/resources/log4j2.xml b/serving/src/main/resources/log4j2.xml index d9aa6ff9ee6..02520cb36c2 100644 --- a/serving/src/main/resources/log4j2.xml +++ b/serving/src/main/resources/log4j2.xml @@ -16,7 +16,7 @@ ~ --> - + %d{yyyy-MM-dd HH:mm:ss.SSS} %5p ${hostName} --- [%15.15t] %-40.40c{1.} : %m%n%ex diff --git a/serving/src/main/resources/templates/bq_featureset_query.sql b/serving/src/main/resources/templates/bq_featureset_query.sql new file mode 100644 index 00000000000..237fe3ba945 --- /dev/null +++ b/serving/src/main/resources/templates/bq_featureset_query.sql @@ -0,0 +1,84 @@ +WITH union_features AS (SELECT + event_timestamp, + {% for featureSet in featureSets %}NULL as {{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, + {% endfor %}{{ fullEntitiesList | join(', ')}}, + true AS 
is_entity_table +FROM `{{projectId}}.{{datasetId}}.{{leftTableName}}` +{% for featureSet in featureSets %} +UNION ALL +SELECT + event_timestamp, + {% for otherFeatureSet in featureSets %} + {% if otherFeatureSet.id != featureSet.id %} + NULL as {{ otherFeatureSet.name }}_v{{ otherFeatureSet.version }}_feature_timestamp, + {% else %} + event_timestamp as {{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, + {% endif %} + {% endfor %} + {% for entityName in fullEntitiesList %} + {% if featureSet.entities contains entityName %} + {{entityName}}, + {% else %} + NULL as {{entityName}}, + {% endif %} + {% endfor %} + false AS is_entity_table +FROM `{{projectId}}.{{datasetId}}.{{ featureSet.name }}_v{{ featureSet.version }}` WHERE event_timestamp <= '{{maxTimestamp}}' AND event_timestamp >= Timestamp_sub(TIMESTAMP '{{ minTimestamp }}', interval {{ featureSet.maxAge }} second) +{% endfor %} +), ts_union AS ( +{% for featureSet in featureSets %} + SELECT * FROM ( + SELECT + event_timestamp, + {{ fullEntitiesList | join(', ')}}, + {% for otherFeatureSet in featureSets %} + {% if otherFeatureSet.id == featureSet.id %} + LAST_VALUE({{ otherFeatureSet.name }}_v{{ otherFeatureSet.version }}_feature_timestamp IGNORE NULLS) over w AS {{ otherFeatureSet.name }}_v{{ otherFeatureSet.version }}_feature_timestamp, + {% else %} + {{ otherFeatureSet.name }}_v{{ otherFeatureSet.version }}_feature_timestamp, + {% endif %} + {% endfor %} + is_entity_table + FROM union_features + WINDOW w AS (PARTITION BY {{ featureSet.entities | join(', ') }} ORDER BY event_timestamp, is_entity_table ASC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) + ) WHERE is_entity_table + {% if loop.last %} + {% else %} + UNION ALL + {% endif %} +{% endfor %} +), ts_coalesce AS ( + SELECT + event_timestamp, + {{ fullEntitiesList | join(', ')}}, + {% for featureSet in featureSets %} + LAST_VALUE({{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp IGNORE NULLS) over w AS {{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, + {% endfor %} + ROW_NUMBER() over w as rn + FROM ts_union + WINDOW w AS (PARTITION BY {{ fullEntitiesList | join(', ')}}, event_timestamp ORDER BY event_timestamp) +), ts_final AS ( +SELECT + event_timestamp, + {{ fullEntitiesList | join(', ')}}, + {% for featureSet in featureSets %} + IF(event_timestamp >= {{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp AND Timestamp_sub(event_timestamp, interval {{ featureSet.maxAge }} second) < {{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, {{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, NULL) as {{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp{% if loop.last %}{% else %}, {% endif %} + {% endfor %} + FROM ts_coalesce WHERE rn = 1 +) +SELECT * FROM ts_final +{% for featureSet in featureSets %} +LEFT JOIN +(SELECT {{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, + {% for featureName in featureSet.features %}{{ featureSet.name }}_v{{ featureSet.version }}_{{ featureName }}, + {% endfor %}{{ featureSet.entities | join(', ') }} + FROM (SELECT + event_timestamp as {{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, + {{ featureSet.entities | join(', ') }}, + {% for featureName in featureSet.features %} + {{ featureName }} as {{ featureSet.name }}_v{{ featureSet.version }}_{{ featureName }}, + {% endfor %}ROW_NUMBER() OVER(PARTITION BY event_timestamp, {{ featureSet.entities | join(', ') }} ORDER BY created_timestamp DESC) as {{ featureSet.name 
}}_v{{ featureSet.version }}_rown + FROM `{{ projectId }}.{{ datasetId }}.{{ featureSet.name }}_v{{ featureSet.version }}` WHERE event_timestamp <= '{{maxTimestamp}}' AND event_timestamp >= Timestamp_sub(TIMESTAMP '{{ minTimestamp }}', interval {{ featureSet.maxAge }} second) +) WHERE {{ featureSet.name }}_v{{ featureSet.version }}_rown = 1 +) USING ({{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, {{ featureSet.entities | join(', ') }}) +{% endfor %} diff --git a/serving/src/test/java/feast/serving/ServingApiApplicationTests.java b/serving/src/test/java/feast/serving/ServingApiApplicationTests.java deleted file mode 100644 index 5208c4ae353..00000000000 --- a/serving/src/test/java/feast/serving/ServingApiApplicationTests.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class ServingApiApplicationTests { - - @Test - public void contextLoads() {} -} diff --git a/serving/src/test/java/feast/serving/controller/ServingServiceGRpcControllerTest.java b/serving/src/test/java/feast/serving/controller/ServingServiceGRpcControllerTest.java new file mode 100644 index 00000000000..329dc21e07f --- /dev/null +++ b/serving/src/test/java/feast/serving/controller/ServingServiceGRpcControllerTest.java @@ -0,0 +1,69 @@ +package feast.serving.controller; + +import static org.mockito.MockitoAnnotations.initMocks; + +import com.google.common.collect.Lists; +import com.google.protobuf.Timestamp; +import feast.serving.FeastProperties; +import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; +import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; +import feast.serving.ServingAPIProto.FeatureSetRequest; +import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; +import feast.serving.service.ServingService; +import feast.types.ValueProto.Value; +import io.grpc.StatusRuntimeException; +import io.grpc.stub.StreamObserver; +import io.jaegertracing.Configuration; +import io.opentracing.Tracer; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.Mockito; + +public class ServingServiceGRpcControllerTest { + + @Mock + private ServingService mockServingService; + + @Mock + private StreamObserver mockStreamObserver; + + private GetOnlineFeaturesRequest validRequest; + + private ServingServiceGRpcController service; + + @Before + public void setUp() { + initMocks(this); + + validRequest = GetOnlineFeaturesRequest.newBuilder() + .addFeatureSets(FeatureSetRequest.newBuilder() + .setName("featureSet") + .setVersion(1) + .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) + .build()) + .addEntityRows(EntityRow.newBuilder() + 
.setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", Value.newBuilder().setInt64Val(1).build()) + .putFields("entity2", Value.newBuilder().setInt64Val(1).build())) + .build(); + + Tracer tracer = Configuration.fromEnv("dummy").getTracer(); + FeastProperties feastProperties = new FeastProperties(); + service = new ServingServiceGRpcController(mockServingService, feastProperties, tracer); + } + + @Test + public void shouldPassValidRequestAsIs() { + service.getOnlineFeatures(validRequest, mockStreamObserver); + Mockito.verify(mockServingService).getOnlineFeatures(validRequest); + } + + @Test + public void shouldCallOnErrorIfEntityDatasetIsNotSet() { + GetOnlineFeaturesRequest missingEntityName = + GetOnlineFeaturesRequest.newBuilder(validRequest).clearEntityRows().build(); + service.getOnlineFeatures(missingEntityName, mockStreamObserver); + Mockito.verify(mockStreamObserver).onError(Mockito.any(StatusRuntimeException.class)); + } +} \ No newline at end of file diff --git a/serving/src/test/java/feast/serving/grpc/FeastServingTest.java b/serving/src/test/java/feast/serving/grpc/FeastServingTest.java deleted file mode 100644 index b7b8c9b1fdd..00000000000 --- a/serving/src/test/java/feast/serving/grpc/FeastServingTest.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.serving.grpc; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertThat; -import static org.mockito.Mockito.verify; - -import feast.serving.ServingAPIProto.QueryFeaturesRequest; -import feast.serving.ServingAPIProto.QueryFeaturesResponse; -import feast.serving.service.FeastServing; -import feast.serving.service.FeatureRetrievalDispatcher; -import feast.serving.service.FeatureStorageRegistry; -import feast.serving.service.RedisFeatureStorage; -import feast.serving.service.SpecStorage; -import feast.serving.testutil.FakeSpecStorage; -import feast.specs.FeatureSpecProto.FeatureSpec; -import io.opentracing.util.GlobalTracer; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; -import org.junit.Before; -import org.junit.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; - -public class FeastServingTest { - - SpecStorage specStorage; - - @Mock - FeatureStorageRegistry featureStorageRegistry; - @Mock - FeatureRetrievalDispatcher featureRetrievalDispatcher; - - // class under test - private FeastServing feast; - - @Before - public void setUp() throws Exception { - MockitoAnnotations.initMocks(this); - specStorage = new FakeSpecStorage(RedisFeatureStorage.TYPE); - feast = new FeastServing( - featureRetrievalDispatcher, specStorage, GlobalTracer.get()); - } - - @Test - public void shouldReturnSameEntityNameAsRequest() { - String entityName = "driver"; - QueryFeaturesRequest request = - QueryFeaturesRequest.newBuilder() - .setEntityName(entityName) - .addFeatureId("driver.total_completed_booking") - .build(); - - QueryFeaturesResponse response = feast.queryFeatures(request); - - assertNotNull(response); - assertThat(response.getEntityName(), equalTo(entityName)); - } - - @Test - public void shouldPassValidRequestToFeatureRetrievalDispatcher() { - String entityName = "driver"; - Collection entityIds = Arrays.asList("entity1", "entity2", "entity3"); - Collection featureIds = Arrays.asList("driver.total_completed_booking"); - QueryFeaturesRequest request = - QueryFeaturesRequest.newBuilder() - .setEntityName(entityName) - .addAllEntityId(entityIds) - .addAllFeatureId(featureIds) - .build(); - - ArgumentCaptor entityNameArg = ArgumentCaptor.forClass(String.class); - ArgumentCaptor> entityIdsArg = ArgumentCaptor.forClass(List.class); - ArgumentCaptor> featureSpecArg = - ArgumentCaptor.forClass(Collection.class); - - QueryFeaturesResponse response = feast.queryFeatures(request); - verify(featureRetrievalDispatcher) - .dispatchFeatureRetrieval( - entityNameArg.capture(), entityIdsArg.capture(), featureSpecArg.capture()); - - assertNotNull(response); - assertThat(response.getEntityName(), equalTo(entityName)); - assertThat(entityNameArg.getValue(), equalTo(entityName)); - assertThat(entityIdsArg.getValue(), containsInAnyOrder(entityIds.toArray())); - } -} diff --git a/serving/src/test/java/feast/serving/grpc/ServingGrpcServiceTest.java b/serving/src/test/java/feast/serving/grpc/ServingGrpcServiceTest.java deleted file mode 100644 index d7eff6af4d8..00000000000 --- a/serving/src/test/java/feast/serving/grpc/ServingGrpcServiceTest.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with 
the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.grpc; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.verify; - -import com.timgroup.statsd.StatsDClient; -import feast.serving.ServingAPIProto.QueryFeaturesRequest; -import feast.serving.ServingAPIProto.QueryFeaturesResponse; -import feast.serving.service.FeastServing; -import io.grpc.StatusRuntimeException; -import io.grpc.stub.StreamObserver; -import io.jaegertracing.Configuration; -import io.opentracing.Tracer; -import java.util.Arrays; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; - -public class ServingGrpcServiceTest { - - @Mock private FeastServing mockFeast; - - @Mock private StreamObserver mockStreamObserver; - - @Mock private StatsDClient statsDClient; - - private QueryFeaturesRequest validRequest; - - private ServingGrpcService service; - - @Before - public void setUp() throws Exception { - MockitoAnnotations.initMocks(this); - - validRequest = - QueryFeaturesRequest.newBuilder() - .setEntityName("driver") - .addAllEntityId(Arrays.asList("driver1", "driver2", "driver3")) - .addAllFeatureId( - Arrays.asList("driver.completed_booking", "driver.last_opportunity")) - .build(); - - Tracer tracer = Configuration.fromEnv("dummy").getTracer(); - service = new ServingGrpcService(mockFeast, tracer, statsDClient); - } - - @Test - public void shouldPassValidRequestAsIs() { - service.queryFeatures(validRequest, mockStreamObserver); - verify(mockFeast).queryFeatures(validRequest); - } - - @Test - public void shouldCallOnErrorIfEntityNameIsNotSet() { - QueryFeaturesRequest missingEntityName = - QueryFeaturesRequest.newBuilder(validRequest).clearEntityName().build(); - - service.queryFeatures(missingEntityName, mockStreamObserver); - - verify(mockStreamObserver).onError(any(StatusRuntimeException.class)); - } - - @Test - public void shouldCallOnErrorIfEntityIdsIsNotSet() { - QueryFeaturesRequest missingEntityIds = - QueryFeaturesRequest.newBuilder(validRequest).clearEntityId().build(); - - service.queryFeatures(missingEntityIds, mockStreamObserver); - - verify(mockStreamObserver).onError(any(StatusRuntimeException.class)); - } - - @Test - public void shouldCallOnErrorIfFeatureIdsIsNotSet() { - QueryFeaturesRequest missingRequestDetails = - QueryFeaturesRequest.newBuilder(validRequest).clearFeatureId().build(); - - service.queryFeatures(missingRequestDetails, mockStreamObserver); - - verify(mockStreamObserver).onError(any(StatusRuntimeException.class)); - } - - @Test - public void shouldCallOnErrorIfFeatureIdsContainsDifferentEntity() { - QueryFeaturesRequest differentEntityReq = - QueryFeaturesRequest.newBuilder(validRequest) - .addFeatureId("customer.order_made") - .build(); - - service.queryFeatures(differentEntityReq, mockStreamObserver); - - verify(mockStreamObserver).onError(any(StatusRuntimeException.class)); - } -} diff --git a/serving/src/test/java/feast/serving/service/BigTableFeatureStorageTestITCase.java b/serving/src/test/java/feast/serving/service/BigTableFeatureStorageTestITCase.java deleted file mode 100644 
index 16c0f6852c5..00000000000 --- a/serving/src/test/java/feast/serving/service/BigTableFeatureStorageTestITCase.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.service; - -import static org.mockito.Mockito.when; - -import com.google.cloud.bigtable.hbase.BigtableConfiguration; -import com.google.cloud.bigtable.hbase.BigtableOptionsFactory; -import com.google.protobuf.Timestamp; -import feast.serving.model.FeatureValue; -import feast.serving.service.BigTableFeatureStorage.BigTableConnectionFactory; -import feast.serving.testutil.BigTablePopulator; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.types.ValueProto.ValueType; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.client.Connection; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mockito; - -public class BigTableFeatureStorageTestITCase { - - private static final String ENTITY_NAME = "test_entity"; - - // The object under test - BigTableFeatureStorage featureStorage; - private BigTablePopulator bigTablePopulator; - private List entityIds; - private Timestamp now; - private Connection connection; - - @Before - public void setUp() throws Exception { - Configuration config = BigtableConfiguration.configure("dummyProject", "dummyInstance"); - config.set(BigtableOptionsFactory.BIGTABLE_EMULATOR_HOST_KEY, "localhost:8080"); - connection = BigtableConfiguration.connect(config); - - BigTableConnectionFactory connectionFactory = Mockito.mock(BigTableConnectionFactory.class); - when(connectionFactory.connect()).thenReturn(connection); - - // ideally use bigtable emulator. 
- bigTablePopulator = new BigTablePopulator(connection); - StorageSpec storageSpec = StorageSpec.newBuilder() - .setId(FeastServing.SERVING_STORAGE_ID) - .setType(BigTableFeatureStorage.TYPE).build(); - featureStorage = new BigTableFeatureStorage(storageSpec, connectionFactory); - - entityIds = createEntityIds(10); - now = Timestamp.newBuilder().setSeconds(System.currentTimeMillis() / 1000).build(); - } - - @After - public void tearDown() throws Exception { - connection.close(); - } - - @Test - public void getFeatures_shouldReturnLastValue() { - FeatureSpec featureSpec1 = - FeatureSpec.newBuilder() - .setEntity(ENTITY_NAME) - .setId("test_entity.feature_1") - .setName("feature_1") - .setValueType(ValueType.Enum.STRING) - .build(); - - FeatureSpec featureSpec2 = - FeatureSpec.newBuilder() - .setEntity(ENTITY_NAME) - .setId("test_entity.feature_2") - .setName("feature_2") - .setValueType(ValueType.Enum.STRING) - .build(); - - List featureSpecs = Arrays.asList(featureSpec1, featureSpec2); - bigTablePopulator.populate(ENTITY_NAME, entityIds, featureSpecs, now); - List results = featureStorage.getFeature(ENTITY_NAME, entityIds, featureSpecs); - - bigTablePopulator.validate(results, entityIds, featureSpecs); - } - - @Test - public void getFeatures_shouldGracefullyHandleMissingEntity() { - FeatureSpec featureSpec1 = - FeatureSpec.newBuilder() - .setEntity(ENTITY_NAME) - .setId("test_entity.feature_1") - .setName("feature_granularity_none") - .setValueType(ValueType.Enum.STRING) - .build(); - - FeatureSpec featureSpec2 = - FeatureSpec.newBuilder() - .setEntity(ENTITY_NAME) - .setId("test_entity.feature_2") - .setName("feature_2") - .setValueType(ValueType.Enum.STRING) - .build(); - - List featureSpecs = Arrays.asList(featureSpec1, featureSpec2); - List entityIdsWithMissingEntity = new ArrayList<>(entityIds); - entityIdsWithMissingEntity.add("100"); - bigTablePopulator.populate(ENTITY_NAME, entityIds, featureSpecs, now); - List results = - featureStorage.getFeature(ENTITY_NAME, entityIdsWithMissingEntity, featureSpecs); - bigTablePopulator.validate(results, entityIds, featureSpecs); - } - - private List createEntityIds(int count) { - List entityIds = new ArrayList<>(); - for (int i = 0; i < count; i++) { - entityIds.add(String.valueOf(i)); - } - return entityIds; - } -} diff --git a/serving/src/test/java/feast/serving/service/CachedSpecServiceTest.java b/serving/src/test/java/feast/serving/service/CachedSpecServiceTest.java new file mode 100644 index 00000000000..2b422b609c1 --- /dev/null +++ b/serving/src/test/java/feast/serving/service/CachedSpecServiceTest.java @@ -0,0 +1,118 @@ +package feast.serving.service; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.junit.Assert.*; +import static org.mockito.Mockito.when; +import static org.mockito.MockitoAnnotations.initMocks; + +import com.google.common.collect.Lists; +import feast.core.CoreServiceProto.ListFeatureSetsRequest; +import feast.core.CoreServiceProto.ListFeatureSetsResponse; +import feast.core.CoreServiceProto.UpdateStoreRequest; +import feast.core.CoreServiceProto.UpdateStoreResponse; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.StoreProto.Store; +import feast.core.StoreProto.Store.RedisConfig; +import feast.core.StoreProto.Store.StoreType; +import feast.core.StoreProto.Store.Subscription; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import 
org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.mockito.Mock; + +public class CachedSpecServiceTest { + + private File configFile; + private Store store; + + @Rule + public final ExpectedException expectedException = ExpectedException.none(); + + @Mock + CoreSpecService coreService; + + private Map featureSetSpecs; + private CachedSpecService cachedSpecService; + + @Before + public void setUp() throws IOException { + initMocks(this); + + configFile = File.createTempFile( "serving", ".yml"); + String yamlString = "name: SERVING\n" + + "type: REDIS\n" + + "redis_config:\n" + + " host: localhost\n" + + " port: 6379\n" + + "subscriptions:\n" + + "- name: fs1\n" + + " version: \">0\"\n" + + "- name: fs2\n" + + " version: \">0\""; + BufferedWriter writer = new BufferedWriter(new FileWriter(configFile)); + writer.write(yamlString); + writer.close(); + + store = Store.newBuilder().setName("SERVING") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) + .addSubscriptions(Subscription.newBuilder().setName("fs1").setVersion(">0").build()) + .addSubscriptions(Subscription.newBuilder().setName("fs2").setVersion(">0").build()) + .build(); + + when(coreService.updateStore(UpdateStoreRequest.newBuilder().setStore(store).build())) + .thenReturn(UpdateStoreResponse.newBuilder().setStore(store).build()); + + featureSetSpecs = new LinkedHashMap<>(); + featureSetSpecs.put("fs1:1", FeatureSetSpec.newBuilder().setName("fs1").setVersion(1).build()); + featureSetSpecs.put("fs1:2", FeatureSetSpec.newBuilder().setName("fs1").setVersion(2).build()); + featureSetSpecs.put("fs2:1", FeatureSetSpec.newBuilder().setName("fs2").setVersion(1).build()); + + List fs1FeatureSets = Lists + .newArrayList(featureSetSpecs.get("fs1:1"), featureSetSpecs.get("fs1:2")); + List fs2FeatureSets = Lists.newArrayList(featureSetSpecs.get("fs2:1")); + when(coreService.listFeatureSets(ListFeatureSetsRequest + .newBuilder() + .setFilter(ListFeatureSetsRequest.Filter.newBuilder().setFeatureSetName("fs1") + .setFeatureSetVersion(">0").build()) + .build())) + .thenReturn(ListFeatureSetsResponse.newBuilder().addAllFeatureSets(fs1FeatureSets).build()); + when(coreService.listFeatureSets(ListFeatureSetsRequest + .newBuilder() + .setFilter(ListFeatureSetsRequest.Filter.newBuilder().setFeatureSetName("fs2") + .setFeatureSetVersion(">0").build()) + .build())) + .thenReturn(ListFeatureSetsResponse.newBuilder().addAllFeatureSets(fs2FeatureSets).build()); + + cachedSpecService = new CachedSpecService(coreService, configFile.toPath()); + } + + @After + public void tearDown() { + configFile.delete(); + } + + @Test + public void shouldPopulateAndReturnStore() { + cachedSpecService.populateCache(); + Store actual = cachedSpecService.getStore(); + assertThat(actual, equalTo(store)); + } + + @Test + public void shouldPopulateAndReturnFeatureSets() { + cachedSpecService.populateCache(); + assertThat(cachedSpecService.getFeatureSet("fs1", 1), equalTo(featureSetSpecs.get("fs1:1"))); + assertThat(cachedSpecService.getFeatureSet("fs1", 2), equalTo(featureSetSpecs.get("fs1:2"))); + assertThat(cachedSpecService.getFeatureSet("fs2", 1), equalTo(featureSetSpecs.get("fs2:1"))); + } +} diff --git a/serving/src/test/java/feast/serving/service/CachedSpecStorageTest.java b/serving/src/test/java/feast/serving/service/CachedSpecStorageTest.java deleted file mode 100644 index 5b3e8adfed1..00000000000 --- 
a/serving/src/test/java/feast/serving/service/CachedSpecStorageTest.java +++ /dev/null @@ -1,88 +0,0 @@ -package feast.serving.service; - -import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.StorageSpecProto.StorageSpec; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import org.junit.Before; -import org.junit.Test; - -public class CachedSpecStorageTest { - - private CoreService coreService; - private CachedSpecStorage cachedSpecStorage; - - @Before - public void setUp() throws Exception { - coreService = mock(CoreService.class); - cachedSpecStorage = new CachedSpecStorage(coreService); - } - - @Test - public void testPopulateCache() { - Map featureSpecMap = new HashMap<>(); - featureSpecMap.put("feature_1", mock(FeatureSpec.class)); - Map entitySpecMap = new HashMap<>(); - entitySpecMap.put("entity_1", mock(EntitySpec.class)); - - when(coreService.getAllFeatureSpecs()).thenReturn(featureSpecMap); - when(coreService.getAllEntitySpecs()).thenReturn(entitySpecMap); - cachedSpecStorage.populateCache(); - - Map result = - cachedSpecStorage.getFeatureSpecs(Collections.singletonList("feature_1")); - Map result2 = - cachedSpecStorage.getEntitySpecs(Collections.singletonList("entity_1")); - - assertThat(result.size(), equalTo(1)); - assertThat(result2.size(), equalTo(1)); - - verify(coreService, times(0)).getFeatureSpecs(any(Iterable.class)); - verify(coreService, times(0)).getEntitySpecs(any(Iterable.class)); - } - - - @Test - public void reloadFailureShouldReturnOldValue() { - Map featureSpecMap = new HashMap<>(); - featureSpecMap.put("feature_1", mock(FeatureSpec.class)); - - Map storageSpecMap = new HashMap<>(); - storageSpecMap.put("storage_1", mock(StorageSpec.class)); - - Map entitySpecMap = new HashMap<>(); - entitySpecMap.put("entity_1", mock(EntitySpec.class)); - - when(coreService.getAllFeatureSpecs()).thenReturn(featureSpecMap); - when(coreService.getFeatureSpecs(any(Iterable.class))).thenThrow(new RuntimeException("error")); - when(coreService.getAllEntitySpecs()).thenReturn(entitySpecMap); - when(coreService.getEntitySpecs(any(Iterable.class))).thenThrow(new RuntimeException("error")); - - cachedSpecStorage.populateCache(); - Map result = - cachedSpecStorage.getFeatureSpecs(Collections.singletonList("feature_1")); - Map result2 = - cachedSpecStorage.getEntitySpecs(Collections.singletonList("entity_1")); - - assertThat(result.size(), equalTo(1)); - assertThat(result2.size(), equalTo(1)); - verify(coreService, times(0)).getFeatureSpecs(any(Iterable.class)); - verify(coreService, times(0)).getEntitySpecs(any(Iterable.class)); - - result = cachedSpecStorage.getFeatureSpecs(Collections.singletonList("feature_1")); - result2 = cachedSpecStorage.getEntitySpecs(Collections.singletonList("entity_1")); - assertThat(result.size(), equalTo(1)); - assertThat(result2.size(), equalTo(1)); - } -} \ No newline at end of file diff --git a/serving/src/test/java/feast/serving/service/CoreServiceTest.java b/serving/src/test/java/feast/serving/service/CoreServiceTest.java deleted file mode 100644 index deb27904213..00000000000 --- 
a/serving/src/test/java/feast/serving/service/CoreServiceTest.java +++ /dev/null @@ -1,325 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.service; - -import static org.hamcrest.CoreMatchers.everyItem; -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.collection.IsIn.isIn; -import static org.junit.Assert.assertThat; - -import com.google.protobuf.ByteString; -import com.google.protobuf.Empty; -import feast.core.CoreServiceGrpc.CoreServiceImplBase; -import feast.core.CoreServiceProto.CoreServiceTypes.GetEntitiesRequest; -import feast.core.CoreServiceProto.CoreServiceTypes.GetEntitiesResponse; -import feast.core.CoreServiceProto.CoreServiceTypes.GetFeaturesRequest; -import feast.core.CoreServiceProto.CoreServiceTypes.GetFeaturesResponse; -import feast.core.CoreServiceProto.CoreServiceTypes.ListEntitiesResponse; -import feast.core.CoreServiceProto.CoreServiceTypes.ListFeaturesResponse; -import feast.serving.exception.SpecRetrievalException; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.types.ValueProto.ValueType; -import io.grpc.ManagedChannelBuilder; -import io.grpc.StatusRuntimeException; -import io.grpc.inprocess.InProcessChannelBuilder; -import io.grpc.inprocess.InProcessServerBuilder; -import io.grpc.stub.StreamObserver; -import io.grpc.testing.GrpcCleanupRule; -import io.grpc.util.MutableHandlerRegistry; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -public class CoreServiceTest { - - @Rule - public final GrpcCleanupRule grpcCleanupRule = new GrpcCleanupRule(); - - @Rule - public final ExpectedException expectedException = ExpectedException.none(); - - private final MutableHandlerRegistry serviceRegistry = new MutableHandlerRegistry(); - private CoreService client; - - @Before - public void setUp() throws Exception { - String serverName = InProcessServerBuilder.generateName(); - - grpcCleanupRule.register( - InProcessServerBuilder.forName(serverName) - .directExecutor() - .fallbackHandlerRegistry(serviceRegistry) - .build() - .start()); - - ManagedChannelBuilder builder = InProcessChannelBuilder.forName(serverName).directExecutor(); - client = new CoreService(builder); - } - - @After - public void tearDown() throws Exception { - client.shutdown(); - } - - @Test - public void getEntitySpecs_shouldSendCorrectRequest() { - List entityIds = Arrays.asList("driver", "customer"); - final AtomicReference requestDelivered = new AtomicReference<>(); - - CoreServiceImplBase 
getEntitiesSpecImpl = - new CoreServiceImplBase() { - @Override - public void getEntities( - GetEntitiesRequest request, StreamObserver responseObserver) { - requestDelivered.set(request); - responseObserver.onNext(GetEntitiesResponse.newBuilder().build()); - responseObserver.onCompleted(); - } - }; - serviceRegistry.addService(getEntitiesSpecImpl); - - client.getEntitySpecs(entityIds); - - List actual = requestDelivered.get().getIdsList().asByteStringList(); - List expected = - entityIds.stream().map(s -> ByteString.copyFromUtf8(s)).collect(Collectors.toList()); - - assertThat(actual, containsInAnyOrder(expected.toArray())); - } - - @Test - public void getEntitySpecs_shouldReturnRequestedEntitySpecs() { - List entityIds = Arrays.asList("driver", "customer"); - - final GetEntitiesResponse response = - GetEntitiesResponse.newBuilder().addAllEntities(getFakeEntitySpecs().values()).build(); - - CoreServiceImplBase getEntitiesSpecImpl = - new CoreServiceImplBase() { - @Override - public void getEntities( - GetEntitiesRequest request, StreamObserver responseObserver) { - responseObserver.onNext(response); - responseObserver.onCompleted(); - } - }; - serviceRegistry.addService(getEntitiesSpecImpl); - - Map result = client.getEntitySpecs(entityIds); - assertThat(result.entrySet(), everyItem(isIn(getFakeEntitySpecs().entrySet()))); - } - - @Test - public void getEntitySpecs_shouldThrowSpecRetrievalExceptionWhenErrorHappens() { - expectedException.expect(SpecRetrievalException.class); - expectedException.expectMessage("Unable to retrieve entity spec"); - expectedException.expectCause(instanceOf(StatusRuntimeException.class)); - - List entityIds = Arrays.asList("driver", "customer"); - - client.getEntitySpecs(entityIds); - } - - @Test - public void getAllEntitySpecs_shouldReturnAllSpecs() { - CoreServiceImplBase serviceImpl = - new CoreServiceImplBase() { - @Override - public void listEntities( - Empty request, StreamObserver responseObserver) { - responseObserver.onNext( - ListEntitiesResponse.newBuilder() - .addAllEntities(getFakeEntitySpecs().values()) - .build()); - responseObserver.onCompleted(); - } - }; - serviceRegistry.addService(serviceImpl); - - Map result = client.getAllEntitySpecs(); - - assertThat(result.entrySet(), everyItem(isIn(getFakeEntitySpecs().entrySet()))); - } - - @Test - public void getAllEntitySpecs_shouldThrowExceptionWhenErrorHappens() { - expectedException.expect(SpecRetrievalException.class); - expectedException.expectMessage("Unable to retrieve entity spec"); - expectedException.expectCause(instanceOf(StatusRuntimeException.class)); - - client.getAllEntitySpecs(); - } - - @Test - public void getFeatureSpecs_shouldSendCorrectRequest() { - List featureIds = - Arrays.asList("driver.total_accepted_booking", "driver.ping_location"); - AtomicReference deliveredRequest = new AtomicReference<>(); - CoreServiceImplBase service = - new CoreServiceImplBase() { - @Override - public void getFeatures( - GetFeaturesRequest request, StreamObserver responseObserver) { - deliveredRequest.set(request); - responseObserver.onNext(GetFeaturesResponse.newBuilder().build()); - responseObserver.onCompleted(); - } - }; - - serviceRegistry.addService(service); - - client.getFeatureSpecs(featureIds); - - List expected = - featureIds.stream().map(s -> ByteString.copyFromUtf8(s)).collect(Collectors.toList()); - List actual = deliveredRequest.get().getIdsList().asByteStringList(); - - assertThat(actual, containsInAnyOrder(expected.toArray())); - } - - @Test - public void 
getFeatureSpecs_shouldReturnRequestedFeatureSpecs() { - List featureIds = - Arrays.asList("driver.total_accepted_booking", "driver.ping_location"); - AtomicReference deliveredRequest = new AtomicReference<>(); - CoreServiceImplBase service = - new CoreServiceImplBase() { - @Override - public void getFeatures( - GetFeaturesRequest request, StreamObserver responseObserver) { - deliveredRequest.set(request); - responseObserver.onNext( - GetFeaturesResponse.newBuilder() - .addAllFeatures(getFakeFeatureSpecs().values()) - .build()); - responseObserver.onCompleted(); - } - }; - - serviceRegistry.addService(service); - - Map results = client.getFeatureSpecs(featureIds); - - assertThat(results.entrySet(), everyItem(isIn(getFakeFeatureSpecs().entrySet()))); - } - - @Test - public void getFeatureSpecs_shouldThrowSpecsRetrievalExceptionWhenErrorHappen() { - expectedException.expect(SpecRetrievalException.class); - expectedException.expectMessage("Unable to retrieve feature specs"); - expectedException.expectCause(instanceOf(StatusRuntimeException.class)); - - List featureIds = - Arrays.asList("driver.total_accepted_booking", "driver.ping_location"); - client.getFeatureSpecs(featureIds); - } - - @Test - public void getAllFeatureSpecs_shouldReturnAllSpecs() { - CoreServiceImplBase service = - new CoreServiceImplBase() { - @Override - public void listFeatures( - Empty request, StreamObserver responseObserver) { - responseObserver.onNext( - ListFeaturesResponse.newBuilder() - .addAllFeatures(getFakeFeatureSpecs().values()) - .build()); - responseObserver.onCompleted(); - } - }; - serviceRegistry.addService(service); - Map results = client.getAllFeatureSpecs(); - - assertThat(results.entrySet(), everyItem(isIn(getFakeFeatureSpecs().entrySet()))); - } - - @Test - public void getAllFeatureSpecs_shouldThrowSpecRetrievalExceptionWhenErrorHappen() { - expectedException.expect(SpecRetrievalException.class); - expectedException.expectMessage("Unable to retrieve feature specs"); - expectedException.expectCause(instanceOf(StatusRuntimeException.class)); - - client.getAllFeatureSpecs(); - } - - - private Map getFakeFeatureSpecs() { - FeatureSpec spec1 = - FeatureSpec.newBuilder() - .setId("driver.total_accepted_booking") - .setName("total_accepted_booking") - .setOwner("dummy@go-jek.com") - .setDescription("awesome feature") - .setValueType(ValueType.Enum.STRING) - .build(); - - FeatureSpec spec2 = - FeatureSpec.newBuilder() - .setId("driver.ping") - .setName("ping") - .setOwner("dummy@go-jek.com") - .setDescription("awesome feature") - .setValueType(ValueType.Enum.INT64) - .build(); - - return Stream.of(spec1, spec2) - .collect(Collectors.toMap(FeatureSpec::getId, Function.identity())); - } - - private Map getFakeEntitySpecs() { - EntitySpec spec1 = - EntitySpec.newBuilder() - .setName("driver") - .setDescription("fake driver entity") - .addTags("tag1") - .addTags("tag2") - .build(); - - EntitySpec spec2 = - EntitySpec.newBuilder() - .setName("customer") - .setDescription("fake customer entity") - .addTags("tag1") - .addTags("tag2") - .build(); - - return Stream.of(spec1, spec2) - .collect(Collectors.toMap(EntitySpec::getName, Function.identity())); - } - - private StorageSpec getFakeStorageSpec() { - StorageSpec spec = StorageSpec.newBuilder().setId(FeastServing.SERVING_STORAGE_ID) - .setType("redis") - .putOptions("host", "localhost") - .putOptions("port", "1234").build(); - return spec; - } -} diff --git a/serving/src/test/java/feast/serving/service/FeatureRetrievalDispatcherTest.java 
b/serving/src/test/java/feast/serving/service/FeatureRetrievalDispatcherTest.java deleted file mode 100644 index b1d5b190d0e..00000000000 --- a/serving/src/test/java/feast/serving/service/FeatureRetrievalDispatcherTest.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.service; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import feast.serving.config.AppConfig; -import feast.specs.FeatureSpecProto.FeatureSpec; -import io.opentracing.util.GlobalTracer; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; - -public class FeatureRetrievalDispatcherTest { - - @Mock - FeatureStorageRegistry featureStorageRegistry; - private FeatureRetrievalDispatcher dispatcher; - private List entityIds; - - @Before - public void setUp() throws Exception { - MockitoAnnotations.initMocks(this); - entityIds = createEntityIds(10); - - dispatcher = - new FeatureRetrievalDispatcher( - featureStorageRegistry, GlobalTracer.get()); - } - - @Test - public void shouldGetFeaturesFromStorage() { - String entityName = "entity"; - FeatureStorage featureStorage = mock(FeatureStorage.class); - when(featureStorage.getFeature(any(String.class), any(List.class), any(List.class))) - .thenReturn(Collections.emptyList()); - when(featureStorageRegistry.get(any(String.class))).thenReturn(featureStorage); - - String featureId = "entity.feature_1"; - FeatureSpec featureSpec = FeatureSpec.newBuilder().setId(featureId).build(); - dispatcher.dispatchFeatureRetrieval( - entityName, entityIds, Collections.singletonList(featureSpec)); - - verify(featureStorage) - .getFeature(entityName, entityIds, Collections.singletonList(featureSpec)); - } - - private List createEntityIds(int count) { - List entityIds = new ArrayList<>(); - for (int i = 0; i < count; i++) { - entityIds.add("entity_" + i); - } - return entityIds; - } -} diff --git a/serving/src/test/java/feast/serving/service/RedisFeatureStorageTest.java b/serving/src/test/java/feast/serving/service/RedisFeatureStorageTest.java deleted file mode 100644 index adb2ba859bb..00000000000 --- a/serving/src/test/java/feast/serving/service/RedisFeatureStorageTest.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.service; - -import static junit.framework.TestCase.fail; - -import com.google.protobuf.Timestamp; -import feast.serving.model.FeatureValue; -import feast.serving.testutil.RedisPopulator; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.types.ValueProto.ValueType; -import io.opentracing.util.GlobalTracer; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import redis.clients.jedis.JedisPool; -import redis.embedded.RedisServer; - -public class RedisFeatureStorageTest { - - public static final String REDIS_HOST = "localhost"; - public static final int REDIS_PORT = 6377; - - // embedded redis - RedisServer redisServer; - RedisPopulator redisPopulator; - - // class under test - RedisFeatureStorage redisFs; - - private List entityIds; - private String entityName; - private Timestamp now; - - @Before - public void setUp() throws Exception { - redisServer = new RedisServer(REDIS_PORT); - try { - redisServer.start(); - } catch (Exception e) { - System.out.println("Unable to start redis redisServer"); - fail(); - } - - JedisPool jedisPool = new JedisPool(REDIS_HOST, REDIS_PORT); - redisFs = new RedisFeatureStorage(jedisPool, GlobalTracer.get()); - - redisPopulator = new RedisPopulator(REDIS_HOST, REDIS_PORT); - entityIds = createEntityIds(10); - entityName = "entity"; - - now = Timestamp.newBuilder().setSeconds(System.currentTimeMillis() / 1000).build(); - } - - @Test - public void getFeatures_shouldNotReturnMissingValue() { - FeatureSpec featureSpec1 = - FeatureSpec.newBuilder() - .setId("entity.feature_1") - .setEntity(entityName) - .setValueType(ValueType.Enum.STRING) - .build(); - - FeatureSpec featureSpec2 = - FeatureSpec.newBuilder() - .setId("entity.feature_2") - .setEntity(entityName) - .setValueType(ValueType.Enum.STRING) - .build(); - - List featureSpecs = Arrays.asList(featureSpec1, featureSpec2); - redisPopulator.populate(entityName, entityIds, featureSpecs, now); - - // add entity without feature - List requestEntityIds = new ArrayList<>(entityIds); - requestEntityIds.add("100"); - List result = redisFs.getFeature(entityName, requestEntityIds, featureSpecs); - redisPopulator.validate(result, entityIds, featureSpecs); - } - - @Test - public void getFeatures_shouldReturnLastValue() { - FeatureSpec spec1 = createFeatureSpec("feature_1"); - FeatureSpec spec2 = createFeatureSpec("feature_2"); - List featureSpecs = Arrays.asList(spec1, spec2); - - redisPopulator.populate(entityName, entityIds, featureSpecs, now); - - List result = redisFs.getFeature(entityName, entityIds, featureSpecs); - - redisPopulator.validate(result, entityIds, featureSpecs); - } - - private FeatureSpec createFeatureSpec(String featureName) { - return createFeatureSpec(featureName, ValueType.Enum.STRING); - } - - private FeatureSpec createFeatureSpec( - String featureName, ValueType.Enum valType) { - String entityName = "entity"; - String featureId = String.format("%s.%s", entityName, featureName); - FeatureSpec spec = - FeatureSpec.newBuilder() - .setEntity(entityName) - .setId(featureId) - .setName(featureName) - .setValueType(valType) - .build(); - - return spec; - } - - private List createEntityIds(int count) { - List entityIds = new ArrayList<>(); - for (int i = 0; i < count; i++) { - entityIds.add(String.valueOf(i)); - } - return entityIds; - } - - @After - 
public void tearDown() throws Exception { - redisServer.stop(); - } -} diff --git a/serving/src/test/java/feast/serving/service/RedisServingServiceTest.java b/serving/src/test/java/feast/serving/service/RedisServingServiceTest.java new file mode 100644 index 00000000000..c1b64ab484e --- /dev/null +++ b/serving/src/test/java/feast/serving/service/RedisServingServiceTest.java @@ -0,0 +1,417 @@ +package feast.serving.service; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.mockito.Mockito.when; +import static org.mockito.MockitoAnnotations.initMocks; + +import com.google.common.collect.Lists; +import com.google.protobuf.AbstractMessageLite; +import com.google.protobuf.Duration; +import com.google.protobuf.Timestamp; +import feast.core.FeatureSetProto.EntitySpec; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; +import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; +import feast.serving.ServingAPIProto.FeatureSetRequest; +import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; +import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse.FieldValues; +import feast.storage.RedisProto.RedisKey; +import feast.types.FeatureRowProto.FeatureRow; +import feast.types.FieldProto.Field; +import feast.types.ValueProto.Value; +import io.opentracing.Tracer; +import io.opentracing.Tracer.SpanBuilder; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.Mock; +import org.mockito.Mockito; +import redis.clients.jedis.Jedis; +import redis.clients.jedis.JedisPool; + +public class RedisServingServiceTest { + + @Mock + JedisPool jedisPool; + + @Mock + Jedis jedis; + + @Mock + CachedSpecService specService; + + @Mock + Tracer tracer; + + private RedisServingService redisServingService; + private byte[][] redisKeyList; + + @Before + public void setUp() { + initMocks(this); + FeatureSetSpec featureSetSpec = FeatureSetSpec.newBuilder() + .addEntities(EntitySpec.newBuilder().setName("entity1")) + .addEntities(EntitySpec.newBuilder().setName("entity2")) + .setMaxAge(Duration.newBuilder().setSeconds(30)) // default + .build(); + + when(specService.getFeatureSet("featureSet", 1)) + .thenReturn(featureSetSpec); + + redisServingService = new RedisServingService(jedisPool, specService, tracer); + redisKeyList = Lists.newArrayList( + RedisKey.newBuilder().setFeatureSet("featureSet:1") + .addAllEntities(Lists.newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("a")).build() + )).build(), + RedisKey.newBuilder().setFeatureSet("featureSet:1") + .addAllEntities(Lists.newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("b")).build() + )).build() + ).stream() + .map(AbstractMessageLite::toByteArray) + .collect(Collectors.toList()) + .toArray(new byte[0][0]); + } + + @Test + public void shouldReturnResponseWithValuesIfKeysPresent() { + GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() + .addFeatureSets(FeatureSetRequest.newBuilder() + .setName("featureSet") + .setVersion(1) + .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) + .build()) + .addEntityRows(EntityRow.newBuilder() + 
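+            // Each entity row in this request corresponds to one of the two RedisKey
+            // entries built in setUp(); the service under test is expected to fetch
+            // them with a single mget() over exactly those serialized keys.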
.setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a"))) + .addEntityRows(EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b"))) + .build(); + + List featureRows = Lists.newArrayList( + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .addAllFields(Lists + .newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) + .setFeatureSet("featureSet:1") + .build(), + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .addAllFields(Lists + .newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) + .setFeatureSet("featureSet:1") + .build() + ); + + List featureRowBytes = featureRows.stream() + .map(AbstractMessageLite::toByteArray) + .collect(Collectors.toList()); + when(jedisPool.getResource()).thenReturn(jedis); + when(jedis.mget(redisKeyList)).thenReturn(featureRowBytes); + when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); + + GetOnlineFeaturesResponse expected = GetOnlineFeaturesResponse.newBuilder() + .addFieldValues(FieldValues.newBuilder() + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a")) + .putFields("featureSet:1:feature1", intValue(1)) + .putFields("featureSet:1:feature2", intValue(1))) + .addFieldValues(FieldValues.newBuilder() + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b")) + .putFields("featureSet:1:feature1", intValue(2)) + .putFields("featureSet:1:feature2", intValue(2))) + .build(); + GetOnlineFeaturesResponse actual = redisServingService.getOnlineFeatures(request); + assertThat(responseToMapList(actual), containsInAnyOrder(responseToMapList(expected).toArray())); + } + + @Test + public void shouldReturnResponseWithUnsetValuesIfKeysNotPresent() { + // some keys not present, should have empty values + GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() + .addFeatureSets(FeatureSetRequest.newBuilder() + .setName("featureSet") + .setVersion(1) + .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) + .build()) + .addEntityRows(EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a"))) + .addEntityRows(EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b"))) + .build(); + + List featureRows = Lists.newArrayList( + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .addAllFields(Lists + .newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) 
+ .setFeatureSet("featureSet:1") + .build(), + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder()) + .addAllFields(Lists + .newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), + Field.newBuilder().setName("feature1").build(), + Field.newBuilder().setName("feature2").build())) + .setFeatureSet("featureSet:1") + .build() + ); + + List featureRowBytes = Lists.newArrayList(featureRows.get(0).toByteArray(), null); + when(jedisPool.getResource()).thenReturn(jedis); + when(jedis.mget(redisKeyList)).thenReturn(featureRowBytes); + when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); + + GetOnlineFeaturesResponse expected = GetOnlineFeaturesResponse.newBuilder() + .addFieldValues(FieldValues.newBuilder() + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a")) + .putFields("featureSet:1:feature1", intValue(1)) + .putFields("featureSet:1:feature2", intValue(1))) + .addFieldValues(FieldValues.newBuilder() + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b")) + .putFields("featureSet:1:feature1", Value.newBuilder().build()) + .putFields("featureSet:1:feature2", Value.newBuilder().build())) + .build(); + GetOnlineFeaturesResponse actual = redisServingService.getOnlineFeatures(request); + assertThat(responseToMapList(actual), containsInAnyOrder(responseToMapList(expected).toArray())); + } + + @Test + public void shouldReturnResponseWithUnsetValuesIfMaxAgeIsExceeded() { + // keys present, but too stale comp. to maxAge set in request + GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() + .addFeatureSets(FeatureSetRequest.newBuilder() + .setName("featureSet") + .setVersion(1) + .setMaxAge(Duration.newBuilder().setSeconds(10)) + .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) + .build()) + .addEntityRows(EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a"))) + .addEntityRows(EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b"))) + .build(); + + List featureRows = Lists.newArrayList( + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .addAllFields(Lists + .newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) + .setFeatureSet("featureSet:1") + .build(), + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(50)) // this value should be nulled + .addAllFields(Lists + .newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) + .setFeatureSet("featureSet:1") + .build() + ); + + List featureRowBytes = featureRows.stream() + .map(AbstractMessageLite::toByteArray) + .collect(Collectors.toList()); + when(jedisPool.getResource()).thenReturn(jedis); + when(jedis.mget(redisKeyList)).thenReturn(featureRowBytes); + 
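+    // The stubs above stand in for a real Redis: mget() simply returns the serialized
+    // rows defined in this test. With maxAge set to 10s on the request, the second row
+    // (event time 50s vs. entity timestamp 100s) is stale, so its feature values are
+    // expected back as unset Values.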
when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); + + GetOnlineFeaturesResponse expected = GetOnlineFeaturesResponse.newBuilder() + .addFieldValues(FieldValues.newBuilder() + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a")) + .putFields("featureSet:1:feature1", intValue(1)) + .putFields("featureSet:1:feature2", intValue(1))) + .addFieldValues(FieldValues.newBuilder() + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b")) + .putFields("featureSet:1:feature1", Value.newBuilder().build()) + .putFields("featureSet:1:feature2", Value.newBuilder().build())) + .build(); + GetOnlineFeaturesResponse actual = redisServingService.getOnlineFeatures(request); + assertThat(responseToMapList(actual), containsInAnyOrder(responseToMapList(expected).toArray())); + } + + + @Test + public void shouldReturnResponseWithUnsetValuesIfDefaultMaxAgeIsExceeded() { + // keys present, but too stale comp. to maxAge set in featureSetSpec + GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() + .addFeatureSets(FeatureSetRequest.newBuilder() + .setName("featureSet") + .setVersion(1) + .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) + .build()) + .addEntityRows(EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a"))) + .addEntityRows(EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b"))) + .build(); + + List featureRows = Lists.newArrayList( + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .addAllFields(Lists + .newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) + .setFeatureSet("featureSet:1") + .build(), + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(0)) // this value should be nulled + .addAllFields(Lists + .newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) + .setFeatureSet("featureSet:1") + .build() + ); + + List featureRowBytes = featureRows.stream() + .map(AbstractMessageLite::toByteArray) + .collect(Collectors.toList()); + when(jedisPool.getResource()).thenReturn(jedis); + when(jedis.mget(redisKeyList)).thenReturn(featureRowBytes); + when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); + + GetOnlineFeaturesResponse expected = GetOnlineFeaturesResponse.newBuilder() + .addFieldValues(FieldValues.newBuilder() + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a")) + .putFields("featureSet:1:feature1", intValue(1)) + .putFields("featureSet:1:feature2", intValue(1))) + .addFieldValues(FieldValues.newBuilder() + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b")) + .putFields("featureSet:1:feature1", Value.newBuilder().build()) + .putFields("featureSet:1:feature2", Value.newBuilder().build())) + .build(); + GetOnlineFeaturesResponse actual = 
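+        // No maxAge is set on this request, so the 30s default from the FeatureSetSpec
+        // registered in setUp() applies; the second row's event time of 0s is well past
+        // that window relative to the 100s entity timestamp, hence the unset values in
+        // the expected response above.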
redisServingService.getOnlineFeatures(request); + assertThat(responseToMapList(actual), containsInAnyOrder(responseToMapList(expected).toArray())); + } + + + @Test + public void shouldFilterOutUndesiredRows() { + // requested rows less than the rows available in the featureset + GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() + .addFeatureSets(FeatureSetRequest.newBuilder() + .setName("featureSet") + .setVersion(1) + .addAllFeatureNames(Lists.newArrayList("feature1")) + .build()) + .addEntityRows(EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a"))) + .addEntityRows(EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b"))) + .build(); + + List featureRows = Lists.newArrayList( + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .addAllFields(Lists + .newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) + .setFeatureSet("featureSet:1") + .build(), + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .addAllFields(Lists + .newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) + .setFeatureSet("featureSet:1") + .build() + ); + + List featureRowBytes = featureRows.stream() + .map(AbstractMessageLite::toByteArray) + .collect(Collectors.toList()); + when(jedisPool.getResource()).thenReturn(jedis); + when(jedis.mget(redisKeyList)).thenReturn(featureRowBytes); + when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); + + GetOnlineFeaturesResponse expected = GetOnlineFeaturesResponse.newBuilder() + .addFieldValues(FieldValues.newBuilder() + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a")) + .putFields("featureSet:1:feature1", intValue(1))) + .addFieldValues(FieldValues.newBuilder() + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b")) + .putFields("featureSet:1:feature1", intValue(2))) + .build(); + GetOnlineFeaturesResponse actual = redisServingService.getOnlineFeatures(request); + assertThat(responseToMapList(actual), containsInAnyOrder(responseToMapList(expected).toArray())); + } + + private List> responseToMapList(GetOnlineFeaturesResponse response) { + return response.getFieldValuesList().stream().map(FieldValues::getFieldsMap).collect(Collectors.toList()); + } + + private Value intValue(int val) { + return Value.newBuilder().setInt64Val(val).build(); + } + + private Value strValue(String val) { + return Value.newBuilder().setStringVal(val).build(); + } +} \ No newline at end of file diff --git a/serving/src/test/java/feast/serving/testutil/BigTablePopulator.java b/serving/src/test/java/feast/serving/testutil/BigTablePopulator.java deleted file mode 100644 index f0040864286..00000000000 --- a/serving/src/test/java/feast/serving/testutil/BigTablePopulator.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Copyright 2018 The 
Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.testutil; - -import static junit.framework.TestCase.fail; - -import com.google.protobuf.Timestamp; -import com.google.protobuf.util.Timestamps; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.storage.BigTableProto.BigTableRowKey; -import feast.types.ValueProto.Value; -import java.io.IOException; -import java.util.Collection; -import org.apache.commons.codec.digest.DigestUtils; -import org.apache.hadoop.hbase.HTableDescriptor; -import org.apache.hadoop.hbase.TableExistsException; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.Admin; -import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; -import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; -import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hbase.client.Put; -import org.apache.hadoop.hbase.client.Table; -import org.apache.hadoop.hbase.client.TableDescriptor; - -/** - * Helper class to populate a BigTable instance with fake data. It mimic ingesting data to BigTable. - */ -public class BigTablePopulator extends FeatureStoragePopulator { - private static final byte[] DEFAULT_COLUMN_FAMILY = "default".getBytes(); - private static final String LATEST_KEY = "0"; - private final Connection connection; - - public BigTablePopulator(Connection connection) { - this.connection = connection; - } - - /** - * Populate big table instance with fake data. 
- * - * @param entityName entity name of the feature - * @param entityIds collection of entity ID for which the feature should be populated - * @param featureSpecs collection of feature specs for which the feature should be populated - * @param timestamp timestamp of the features - */ - @Override - public void populate( - String entityName, - Collection entityIds, - Collection featureSpecs, - Timestamp timestamp) { - - createTableIfNecessary(entityName); - populateTableWithFakeData(entityName, entityIds, featureSpecs, timestamp); - } - - private void createTableIfNecessary(String entityName) { - try (Admin admin = connection.getAdmin()) { - - TableName tableName = TableName.valueOf(entityName); - TableDescriptor tableDescriptor = new HTableDescriptor(tableName); - if (admin.tableExists(tableName)) { - return; - } - - admin.createTable(tableDescriptor); - ColumnFamilyDescriptor cfDesc = ColumnFamilyDescriptorBuilder.of(DEFAULT_COLUMN_FAMILY); - admin.addColumnFamily(tableName, cfDesc); - - } catch (TableExistsException e) { - System.out.println("Table already exists: " + entityName); - } catch (IOException e) { - System.out.println("unable to connect to table: " + entityName); - e.printStackTrace(); - fail(); - } - } - - private void populateTableWithFakeData( - String entityName, - Collection entityIds, - Collection featureSpecs, - Timestamp timestamp) { - TableName tableName = TableName.valueOf(entityName); - try (Table table = connection.getTable(tableName)) { - for (FeatureSpec featureSpec : featureSpecs) { - for (String entityId : entityIds) { - Put put = makePut(entityId, featureSpec, timestamp); - table.put(put); - } - } - - } catch (IOException e) { - System.out.println("unable to connect to table: " + entityName); - e.printStackTrace(); - fail(); - } - } - - private Put makePut(String entityId, FeatureSpec fs, Timestamp roundedTimestamp) { - BigTableRowKey rowKey = - BigTableRowKey.newBuilder() - .setEntityKey(entityId) - .setReversedMillis(LATEST_KEY) - .setSha1Prefix(DigestUtils.sha1Hex(entityId.getBytes()).substring(0, 7)) - .build(); - Put put = new Put(rowKey.toByteArray()); - long timestamp = Timestamps.toMillis(roundedTimestamp); - Value val = createValue(entityId, fs.getId(), roundedTimestamp, fs.getValueType()); - put.addColumn(DEFAULT_COLUMN_FAMILY, fs.getId().getBytes(), timestamp, val.toByteArray()); - return put; - } -} diff --git a/serving/src/test/java/feast/serving/testutil/FakeSpecStorage.java b/serving/src/test/java/feast/serving/testutil/FakeSpecStorage.java deleted file mode 100644 index f392b47b3c6..00000000000 --- a/serving/src/test/java/feast/serving/testutil/FakeSpecStorage.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package feast.serving.testutil; - -import feast.serving.service.SpecStorage; -import feast.specs.EntitySpecProto.EntitySpec; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.specs.StorageSpecProto.StorageSpec; -import feast.types.ValueProto.ValueType; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.StreamSupport; - -public class FakeSpecStorage implements SpecStorage { - - Map entitySpecMap = new HashMap<>(); - Map featureSpecMap = new HashMap<>(); - StorageSpec storageSpec; - - public FakeSpecStorage(String storageType) { - String lastOpportunityId = "driver.last_opportunity"; - String lastOpportunityName = "last_opportunity"; - String dailyCompletedBookingId = "driver.total_completed_booking"; - String dailyCompletedBookingName = "total_completed_booking"; - - if (storageType.equals("bigtable")) { - // populate with hardcoded value - storageSpec = StorageSpec.newBuilder().setId("SERVING").setType("bigtable") - .putOptions("project", "project") - .putOptions("instance", "instance") - .putOptions("family", "default") - .putOptions("prefix", "") - .build(); - } else if (storageType.equals("redis")) { - storageSpec = StorageSpec.newBuilder().setId("SERVING").setType("redis") - .putOptions("host", "localhost") - .putOptions("port", "1234") - .build(); - } - - EntitySpec driver = EntitySpec.newBuilder().setName("driver").build(); - - entitySpecMap.put("driver", driver); - - FeatureSpec lastOpportunity = - FeatureSpec.newBuilder() - .setId(lastOpportunityId) - .setName(lastOpportunityName) - .setValueType(ValueType.Enum.INT64) - .build(); - - FeatureSpec totalCompleted = - FeatureSpec.newBuilder() - .setId(dailyCompletedBookingId) - .setName(dailyCompletedBookingName) - .setValueType(ValueType.Enum.INT64) - .build(); - - featureSpecMap.put(lastOpportunityId, lastOpportunity); - featureSpecMap.put(dailyCompletedBookingId, totalCompleted); - } - - @Override - public Map getEntitySpecs(Iterable entityIds) { - return StreamSupport.stream(entityIds.spliterator(), false) - .filter(entitySpecMap::containsKey) - .collect(Collectors.toMap(Function.identity(), entitySpecMap::get)); - } - - @Override - public Map getAllEntitySpecs() { - return Collections.unmodifiableMap(entitySpecMap); - } - - @Override - public Map getFeatureSpecs(Iterable featureIds) { - return StreamSupport.stream(featureIds.spliterator(), false) - .filter(featureSpecMap::containsKey) - .collect(Collectors.toMap(Function.identity(), featureSpecMap::get)); - } - - @Override - public boolean isConnected() { - return true; - } -} diff --git a/serving/src/test/java/feast/serving/testutil/FeatureStoragePopulator.java b/serving/src/test/java/feast/serving/testutil/FeatureStoragePopulator.java deleted file mode 100644 index 823be09d212..00000000000 --- a/serving/src/test/java/feast/serving/testutil/FeatureStoragePopulator.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.testutil; - -import static java.util.stream.Collectors.groupingBy; -import static org.apache.hadoop.hbase.shaded.org.junit.Assert.assertNotNull; -import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; - -import com.google.protobuf.Timestamp; -import feast.serving.model.FeatureValue; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.types.ValueProto.Value; -import feast.types.ValueProto.ValueType; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public abstract class FeatureStoragePopulator { - - /** - * Populate feature storage with fake data. - * - * @param entityName entity name. - * @param entityIds collection of entity ID to be added. - * @param featureSpecs collection of feature's spec for which the data should be added. - * @param timestamp event timestamp of data - */ - public abstract void populate( - String entityName, - Collection entityIds, - Collection featureSpecs, - Timestamp timestamp); - - /** - * Create a historical feature value based on parameters. - * - * @param entityId - * @param featureId - * @param ts - * @param valType - * @return - */ - protected Value createValue( - String entityId, String featureId, Timestamp ts, ValueType.Enum valType) { - switch (valType) { - case INT64: - return Value.newBuilder().setInt64Val(ts.getSeconds()).build(); - case STRING: - String value = String.format("%s_%s_%s", entityId, featureId, ts.getSeconds()); - return Value.newBuilder().setStringVal(value).build(); - default: - throw new IllegalArgumentException("not yet supported"); - } - } - - public void validate( - List result, List entityIds, List featureSpecs) { - Map>> entityMap = toEntityMap(result); - - assertNotNull(entityMap); - assertThat(entityMap.size(), equalTo(entityIds.size())); - - for (String entityId : entityIds) { - Map> featureMap = entityMap.get(entityId); - assertNotNull(featureMap); - assertThat(featureMap.size(), equalTo(featureSpecs.size())); - for (FeatureSpec featureSpec : featureSpecs) { - List featureValueList = featureMap.get(featureSpec.getId()); - assertNotNull(featureValueList); - assertThat(featureValueList.size(), equalTo(1)); - - FeatureValue featureValue = featureValueList.get(0); - Timestamp timestamp = featureValue.getTimestamp(); - validateValue(featureValue, entityId, featureSpec, timestamp); - } - } - } - - private void validateValue( - FeatureValue featureValue, String entityId, FeatureSpec featureSpec, Timestamp timestamp) { - Value actualValue = featureValue.getValue(); - Value expectedValue = - createValue(entityId, featureSpec.getId(), timestamp, featureSpec.getValueType()); - assertThat(actualValue, equalTo(expectedValue)); - } - - private Map>> toEntityMap( - List featureValues) { - Map> temp = - featureValues.stream().collect(groupingBy(FeatureValue::getEntityId)); - - Map>> entityMap = new HashMap<>(); - for (Map.Entry> entry : temp.entrySet()) { - entityMap.put( - entry.getKey(), - entry.getValue().stream().collect(groupingBy(FeatureValue::getFeatureId))); - } - return entityMap; - } -} diff --git a/serving/src/test/java/feast/serving/testutil/RedisPopulator.java b/serving/src/test/java/feast/serving/testutil/RedisPopulator.java deleted file mode 100644 index 21235a9bfa0..00000000000 --- a/serving/src/test/java/feast/serving/testutil/RedisPopulator.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * 
Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.testutil; - -import com.google.protobuf.Timestamp; -import feast.specs.FeatureSpecProto.FeatureSpec; -import feast.storage.RedisProto.RedisBucketKey; -import feast.storage.RedisProto.RedisBucketValue; -import java.util.Collection; -import org.apache.commons.codec.digest.DigestUtils; -import redis.clients.jedis.Jedis; - -public class RedisPopulator extends FeatureStoragePopulator { - private final Jedis jedis; - - public RedisPopulator(String redisHost, int redisPort) { - jedis = new Jedis(redisHost, redisPort); - } - - @Override - public void populate( - String entityName, - Collection entityIds, - Collection featureSpecs, - Timestamp timestamp) { - for (FeatureSpec fs : featureSpecs) { - for (String entityId : entityIds) { - addData(entityId, fs, timestamp); - } - } - } - - /** - * Add feature value. - * - * @param entityId entityId of data to be added. - * @param fs feature spec of the feature to be added. - * @param timestamp timestamp of data - */ - private void addData(String entityId, FeatureSpec fs, Timestamp timestamp) { - RedisBucketKey bucketKey = createBucketKey(entityId, getFeatureIdSha1Prefix(fs.getId()), 0); - RedisBucketValue bucketValue = - RedisBucketValue.newBuilder() - .setValue(createValue(entityId, fs.getId(), timestamp, fs.getValueType())) - .setEventTimestamp(timestamp) - .build(); - byte[] key = bucketKey.toByteArray(); - byte[] value = bucketValue.toByteArray(); - jedis.set(key, value); - } - - /** - * Create {@link RedisBucketKey}. - * - * @param entityId - * @param featureIdSha1Prefix - * @param bucketId - * @return - */ - private RedisBucketKey createBucketKey( - String entityId, String featureIdSha1Prefix, long bucketId) { - return RedisBucketKey.newBuilder() - .setEntityKey(entityId) - .setFeatureIdSha1Prefix(featureIdSha1Prefix) - .setBucketId(bucketId) - .build(); - } - - /** - * Convenient function to calculate feature id's sha1 prefix. - * - * @param featureId feature ID - * @return first 7 characters of SHA1(featureID) - */ - private String getFeatureIdSha1Prefix(String featureId) { - return DigestUtils.sha1Hex(featureId.getBytes()).substring(0, 7); - } -} diff --git a/serving/src/test/java/feast/serving/testutil/ResourceUtil.java b/serving/src/test/java/feast/serving/testutil/ResourceUtil.java deleted file mode 100644 index a9c98437147..00000000000 --- a/serving/src/test/java/feast/serving/testutil/ResourceUtil.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.testutil; - -import java.io.File; - -public final class ResourceUtil { - - private ResourceUtil() {} - - public static final File getFileFromResource(String path) { - return new File(ResourceUtil.class.getClassLoader().getResource(path).getFile()); - } -} diff --git a/serving/src/test/java/feast/serving/util/EntityMapBuilderTest.java b/serving/src/test/java/feast/serving/util/EntityMapBuilderTest.java deleted file mode 100644 index 642ef275cd4..00000000000 --- a/serving/src/test/java/feast/serving/util/EntityMapBuilderTest.java +++ /dev/null @@ -1,234 +0,0 @@ -/* - * Copyright 2018 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package feast.serving.util; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertThat; - -import com.google.common.util.concurrent.Futures; -import com.google.common.util.concurrent.ListenableFuture; -import com.google.common.util.concurrent.ListeningExecutorService; -import com.google.common.util.concurrent.MoreExecutors; -import com.google.protobuf.Timestamp; -import com.google.protobuf.util.Timestamps; -import feast.serving.ServingAPIProto; -import feast.serving.ServingAPIProto.Entity; -import feast.serving.model.FeatureValue; -import feast.types.ValueProto.Value; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Executors; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; -import org.junit.Test; - -public class EntityMapBuilderTest { - @Test - public void toEntityMap_shouldNotReturnNull() { - EntityMapBuilder builder = new EntityMapBuilder(); - assertNotNull(builder.toEntityMap()); - } - - @Test - public void addFeatureValueList_shouldWorkForOneEntityIdAndOneFeatureId() { - EntityMapBuilder builder = new EntityMapBuilder(); - List entityIds = createEntityIds(1); - List featureIds = createFeatureIds(1); - Timestamp timestamp = Timestamps.fromSeconds(0); - List featureValueList = createFeatureValues(entityIds, featureIds, timestamp); - - builder.addFeatureValueList(featureValueList); - - Map result = builder.toEntityMap(); - - validate(result, entityIds, featureIds, timestamp); - } - - @Test - public void addFeatureValueList_shouldWorkForSeveralEntityIdAndOneFeatureId() { - EntityMapBuilder builder = new EntityMapBuilder(); - List entityIds = createEntityIds(10); - List featureIds = 
createFeatureIds(1); - Timestamp timestamp = Timestamps.fromSeconds(0); - List featureValueList = createFeatureValues(entityIds, featureIds, timestamp); - - builder.addFeatureValueList(featureValueList); - - Map result = builder.toEntityMap(); - validate(result, entityIds, featureIds, timestamp); - } - - @Test - public void addFeatureValueList_shouldWorkForSeveralEntityIdAndSeveralFeatureId() { - EntityMapBuilder builder = new EntityMapBuilder(); - List entityIds = createEntityIds(10); - List featureIds = createFeatureIds(10); - Timestamp timestamp = Timestamps.fromSeconds(0); - List featureValueList = createFeatureValues(entityIds, featureIds, timestamp); - - builder.addFeatureValueList(featureValueList); - - Map result = builder.toEntityMap(); - - validate(result, entityIds, featureIds, timestamp); - } - - @Test - public void addFeatureValueList_shouldWorkForMultipleCallOfDifferentEntityId() { - EntityMapBuilder builder = new EntityMapBuilder(); - List entityIds = createEntityIds(20); - List featureIds = createFeatureIds(20); - Timestamp timestamp = Timestamps.fromSeconds(0); - List featureValueList1 = - createFeatureValues(entityIds.subList(0, 10), featureIds, timestamp); - List featureValueList2 = - createFeatureValues(entityIds.subList(10, 20), featureIds, timestamp); - - builder.addFeatureValueList(featureValueList1); - builder.addFeatureValueList(featureValueList2); - - Map result = builder.toEntityMap(); - - validate(result, entityIds, featureIds, timestamp); - } - - @Test - public void addFeatureValueList_shouldWorkForMultipleCallOfDifferentFeature() { - EntityMapBuilder builder = new EntityMapBuilder(); - List entityIds = createEntityIds(20); - List featureIds = createFeatureIds(20); - Timestamp timestamp = Timestamps.fromSeconds(0); - List featureValueList1 = - createFeatureValues(entityIds, featureIds.subList(0, 10), timestamp); - List featureValueList2 = - createFeatureValues(entityIds, featureIds.subList(10, 20), timestamp); - - builder.addFeatureValueList(featureValueList1); - builder.addFeatureValueList(featureValueList2); - - Map result = builder.toEntityMap(); - - validate(result, entityIds, featureIds, timestamp); - } - - @Test - public void shouldBeThreadSafe() throws Exception { - int nbThread = 16; - int featurePerThread = 10; - ListeningExecutorService service = - MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(nbThread)); - CountDownLatch latch = new CountDownLatch(1); - AtomicBoolean running = new AtomicBoolean(); - - EntityMapBuilder builder = new EntityMapBuilder(); - List entityIds = createEntityIds(20); - List featureIds = createFeatureIds(featurePerThread * nbThread); - Timestamp timestamp = Timestamps.fromSeconds(0); - - List> featureValueList = new ArrayList<>(); - for (int i = 0; i < nbThread; i++) { - featureValueList.add( - createFeatureValues( - entityIds, - featureIds.subList(i * featurePerThread, (i + 1) * featurePerThread), - timestamp)); - } - - List> futures = new ArrayList<>(); - for (int i = 0; i < nbThread; i++) { - final int j = i; - futures.add( - service.submit( - () -> { - try { - latch.await(); - } catch (InterruptedException e) { - e.printStackTrace(); - } - running.set(true); - builder.addFeatureValueList(featureValueList.get(j)); - running.set(false); - return null; - })); - } - - ListenableFuture> all = Futures.allAsList(futures); - latch.countDown(); - - all.get(); - - validate(builder.toEntityMap(), entityIds, featureIds, timestamp); - } - - private void validate( - Map result, - List entityIds, - List featureIds, - 
Timestamp timestamp) { - assertThat(result.size(), equalTo(entityIds.size())); - for (String entityId : entityIds) { - Entity entity = result.get(entityId); - assertNotNull(entity); - Map featureValueMap = entity.getFeaturesMap(); - assertNotNull(featureValueMap); - assertThat(featureValueMap.size(), equalTo(featureIds.size())); - for (String featureId : featureIds) { - ServingAPIProto.FeatureValue featureValue = featureValueMap.get(featureId); - - assertThat(featureValue.getTimestamp(), equalTo(timestamp)); - assertThat(timestamp.getSeconds(), equalTo(featureValue.getValue().getInt64Val())); - } - } - } - - private List createFeatureIds(int count) { - List featureIds = new ArrayList<>(); - for (int i = 0; i < count; i++) { - featureIds.add("entity.feature_" + i); - } - return featureIds; - } - - private List createEntityIds(int count) { - List entityIds = new ArrayList<>(); - for (int i = 0; i < count; i++) { - entityIds.add("entity_" + i); - } - return entityIds; - } - - private List createFeatureValues( - List entityIds, List featureIds, Timestamp timestamp) { - List featureValues = new ArrayList<>(); - for (String entityId : entityIds) { - for (String featureId : featureIds) { - featureValues.add( - new FeatureValue( - featureId, - entityId, - Value.newBuilder().setInt64Val(timestamp.getSeconds()).build(), - timestamp)); - } - } - return featureValues; - } -} diff --git a/serving/src/test/java/feast/serving/util/mappers/YamlToProtoMapperTest.java b/serving/src/test/java/feast/serving/util/mappers/YamlToProtoMapperTest.java new file mode 100644 index 00000000000..88d227d76a7 --- /dev/null +++ b/serving/src/test/java/feast/serving/util/mappers/YamlToProtoMapperTest.java @@ -0,0 +1,36 @@ +package feast.serving.util.mappers; + +import static org.hamcrest.core.IsEqual.equalTo; +import static org.junit.Assert.*; + +import feast.core.StoreProto.Store; +import feast.core.StoreProto.Store.RedisConfig; +import feast.core.StoreProto.Store.StoreType; +import feast.core.StoreProto.Store.Subscription; +import java.io.IOException; +import org.junit.Test; + +public class YamlToProtoMapperTest { + + @Test + public void shouldConvertYamlToProto() throws IOException { + String yaml = "name: test\n" + + "type: REDIS\n" + + "redis_config:\n" + + " host: localhost\n" + + " port: 6379\n" + + "subscriptions:\n" + + "- name: \"*\"\n" + + " version: \">0\"\n"; + Store store = YamlToProtoMapper.yamlToStoreProto(yaml); + Store expected = Store.newBuilder() + .setName("test") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) + .addSubscriptions(Subscription.newBuilder() + .setName("*") + .setVersion(">0")) + .build(); + assertThat(store, equalTo(expected)); + } +} \ No newline at end of file diff --git a/tests/e2e/basic/cust_trans_fs.yaml b/tests/e2e/basic/cust_trans_fs.yaml new file mode 100644 index 00000000000..e72ee616eb8 --- /dev/null +++ b/tests/e2e/basic/cust_trans_fs.yaml @@ -0,0 +1,11 @@ +name: customer_transactions +kind: feature_set +entities: +- name: customer_id + valueType: INT64 +features: +- name: daily_transactions + valueType: FLOAT +- name: total_transactions + valueType: FLOAT +maxAge: 3600s diff --git a/tests/e2e/basic/data.csv b/tests/e2e/basic/data.csv new file mode 100644 index 00000000000..d2994d253a3 --- /dev/null +++ b/tests/e2e/basic/data.csv @@ -0,0 +1,3 @@ +datetime,customer_id,daily_transactions,total_transactions +1570366527,1001,1.3,500 +1570366536,1002,1.4,600 \ No newline at end of file diff --git a/tests/e2e/conftest.py 
b/tests/e2e/conftest.py new file mode 100644 index 00000000000..b37770a83f9 --- /dev/null +++ b/tests/e2e/conftest.py @@ -0,0 +1,4 @@ +def pytest_addoption(parser): + parser.addoption("--core_url", action="store", default="localhost:6565") + parser.addoption("--serving_url", action="store", default="localhost:6566") + parser.addoption("--allow_dirty", action="store", default="False") diff --git a/tests/e2e/large_volume/cust_trans_large_fs.yaml b/tests/e2e/large_volume/cust_trans_large_fs.yaml new file mode 100644 index 00000000000..04707412aa6 --- /dev/null +++ b/tests/e2e/large_volume/cust_trans_large_fs.yaml @@ -0,0 +1,11 @@ +name: customer_transactions_large +kind: feature_set +entities: +- name: customer_id + valueType: INT64 +features: +- name: daily_transactions + valueType: FLOAT +- name: total_transactions + valueType: FLOAT +maxAge: 3600s diff --git a/tests/e2e/pytest.ini b/tests/e2e/pytest.ini new file mode 100644 index 00000000000..b0e5a945f54 --- /dev/null +++ b/tests/e2e/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +filterwarnings = + ignore::DeprecationWarning \ No newline at end of file diff --git a/tests/e2e/requirements.txt b/tests/e2e/requirements.txt new file mode 100644 index 00000000000..6b999421c04 --- /dev/null +++ b/tests/e2e/requirements.txt @@ -0,0 +1,8 @@ +mock==2.0.0 +numpy==1.16.4 +pandas==0.24.2 +pytest==5.2.1 +pytest-benchmark==3.2.2 +pytest-mock==1.10.4 +pytest-timeout==1.3.3 +pytest-ordering==0.6.* diff --git a/tests/e2e/test_e2e.py b/tests/e2e/test_e2e.py new file mode 100644 index 00000000000..bcfdafa754f --- /dev/null +++ b/tests/e2e/test_e2e.py @@ -0,0 +1,404 @@ +import pytest +import math +import random +import time +from feast.entity import Entity +from feast.serving.ServingService_pb2 import ( + GetOnlineFeaturesRequest, + GetOnlineFeaturesResponse, +) +from feast.types.Value_pb2 import Value as Value +from feast.client import Client +from feast.feature_set import FeatureSet +from feast.type_map import ValueType +from google.protobuf.duration_pb2 import Duration +from datetime import datetime +import pytz + +import pandas as pd +import numpy as np + +from feast.feature import Feature + +FLOAT_TOLERANCE = 0.00001 + + +@pytest.fixture(scope='session') +def core_url(pytestconfig): + return pytestconfig.getoption("core_url") + + +@pytest.fixture(scope='session') +def serving_url(pytestconfig): + return pytestconfig.getoption("serving_url") + + +@pytest.fixture(scope='session') +def allow_dirty(pytestconfig): + return True if pytestconfig.getoption( + "allow_dirty").lower() == "true" else False + + +@pytest.fixture(scope='session') +def client(core_url, serving_url, allow_dirty): + # Get client for core and serving + client = Client(core_url=core_url, serving_url=serving_url) + + # Ensure Feast core is active, but empty + if not allow_dirty: + feature_sets = client.list_feature_sets() + if len(feature_sets) > 0: + raise Exception( + "Feast cannot have existing feature sets registered. Exiting tests." 
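+            # The core_url, serving_url and allow_dirty fixtures above are wired to the
+            # pytest options declared in conftest.py; an illustrative invocation (not
+            # part of this change) would be:
+            #   pytest tests/e2e --core_url localhost:6565 --serving_url localhost:6566 --allow_dirty true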
+ ) + + return client + + +@pytest.fixture(scope='session') +def basic_dataframe(): + offset = random.randint(1000, 100000) # ensure a unique key space is used + return pd.DataFrame( + { + "datetime": [datetime.utcnow().replace(tzinfo=pytz.utc) for _ in + range(5)], + "customer_id": [offset + inc for inc in range(5)], + "daily_transactions": [np.random.rand() for _ in range(5)], + "total_transactions": [512 for _ in range(5)], + } + ) + + +@pytest.mark.timeout(45) +@pytest.mark.run(order=10) +def test_basic_register_feature_set_success(client): + # Load feature set from file + cust_trans_fs_expected = FeatureSet.from_yaml("basic/cust_trans_fs.yaml") + + # Register feature set + client.apply(cust_trans_fs_expected) + + # Feast Core needs some time to fully commit the FeatureSet applied + # when there is no existing job yet for the Featureset + time.sleep(15) + + cust_trans_fs_actual = client.get_feature_set(name="customer_transactions") + + assert cust_trans_fs_actual == cust_trans_fs_expected + + if cust_trans_fs_actual is None: + raise Exception( + "Client cannot retrieve 'customer_transactions' FeatureSet " + "after registration. Either Feast Core does not save the " + "FeatureSet correctly or the client needs to wait longer for FeatureSet " + "to be committed." + ) + + +@pytest.mark.timeout(30) +@pytest.mark.run(order=11) +def test_basic_ingest_success(client, basic_dataframe): + cust_trans_fs = client.get_feature_set(name="customer_transactions") + + # Ingest customer transaction data + client.ingest(cust_trans_fs, dataframe=basic_dataframe) + + +@pytest.mark.timeout(45) +@pytest.mark.run(order=12) +def test_basic_retrieve_online_success(client, basic_dataframe): + # Poll serving for feature values until the correct values are returned + while True: + time.sleep(1) + + response = client.get_online_features( + entity_rows=[ + GetOnlineFeaturesRequest.EntityRow( + fields={ + "customer_id": Value( + int64_val=basic_dataframe.iloc[0]["customer_id"] + ) + } + ) + ], + feature_ids=[ + "customer_transactions:1:daily_transactions", + "customer_transactions:1:total_transactions", + ], + ) # type: GetOnlineFeaturesResponse + + if response is None: + continue + + returned_daily_transactions = float( + response.field_values[0] + .fields["customer_transactions:1:daily_transactions"] + .float_val + ) + sent_daily_transactions = float( + basic_dataframe.iloc[0]["daily_transactions"]) + + if math.isclose( + sent_daily_transactions, + returned_daily_transactions, + abs_tol=FLOAT_TOLERANCE, + ): + break + + +@pytest.fixture(scope='session') +def all_types_dataframe(): + return pd.DataFrame( + { + "datetime": [datetime.utcnow().replace(tzinfo=pytz.utc) for _ in + range(3)], + "user_id": [1001, 1002, 1003], + "int32_feature": [np.int32(1), np.int32(2), np.int32(3)], + "int64_feature": [np.int64(1), np.int64(2), np.int64(3)], + "float_feature": [np.float(0.1), np.float(0.2), np.float(0.3)], + "double_feature": [np.float64(0.1), np.float64(0.2), + np.float64(0.3)], + "string_feature": ["one", "two", "three"], + "bytes_feature": [b"one", b"two", b"three"], + "bool_feature": [True, False, False], + "int32_list_feature": [ + np.array([1, 2, 3, 4], dtype=np.int32), + np.array([1, 2, 3, 4], dtype=np.int32), + np.array([1, 2, 3, 4], dtype=np.int32), + ], + "int64_list_feature": [ + np.array([1, 2, 3, 4], dtype=np.int64), + np.array([1, 2, 3, 4], dtype=np.int64), + np.array([1, 2, 3, 4], dtype=np.int64), + ], + "float_list_feature": [ + np.array([1.1, 1.2, 1.3, 1.4], dtype=np.float32), + np.array([1.1, 1.2, 1.3, 
1.4], dtype=np.float32), + np.array([1.1, 1.2, 1.3, 1.4], dtype=np.float32), + ], + "double_list_feature": [ + np.array([1.1, 1.2, 1.3, 1.4], dtype=np.float64), + np.array([1.1, 1.2, 1.3, 1.4], dtype=np.float64), + np.array([1.1, 1.2, 1.3, 1.4], dtype=np.float64), + ], + "string_list_feature": [ + np.array(["one", "two", "three"]), + np.array(["one", "two", "three"]), + np.array(["one", "two", "three"]), + ], + "bytes_list_feature": [ + np.array([b"one", b"two", b"three"]), + np.array([b"one", b"two", b"three"]), + np.array([b"one", b"two", b"three"]), + ], + "bool_list_feature": [ + np.array([True, False, True]), + np.array([True, False, True]), + np.array([True, False, True]), + ], + } + ) + + +@pytest.mark.timeout(45) +@pytest.mark.run(order=20) +def test_all_types_register_feature_set_success(client): + all_types_fs_expected = FeatureSet( + name="all_types", + entities=[Entity(name="user_id", dtype=ValueType.INT64)], + features=[ + Feature(name="float_feature", dtype=ValueType.FLOAT), + Feature(name="int64_feature", dtype=ValueType.INT64), + Feature(name="int32_feature", dtype=ValueType.INT32), + Feature(name="string_feature", dtype=ValueType.STRING), + Feature(name="bytes_feature", dtype=ValueType.BYTES), + Feature(name="bool_feature", dtype=ValueType.BOOL), + Feature(name="double_feature", dtype=ValueType.DOUBLE), + Feature(name="float_list_feature", dtype=ValueType.FLOAT_LIST), + Feature(name="int64_list_feature", dtype=ValueType.INT64_LIST), + Feature(name="int32_list_feature", dtype=ValueType.INT32_LIST), + Feature(name="string_list_feature", + dtype=ValueType.STRING_LIST), + Feature(name="bytes_list_feature", dtype=ValueType.BYTES_LIST), + Feature(name="bool_list_feature", dtype=ValueType.BOOL_LIST), + Feature(name="double_list_feature", + dtype=ValueType.DOUBLE_LIST), + ], + max_age=Duration(seconds=3600), + ) + + # Register feature set + client.apply(all_types_fs_expected) + + # Feast Core needs some time to fully commit the FeatureSet applied + # when there is no existing job yet for the Featureset + time.sleep(10) + + all_types_fs_actual = client.get_feature_set(name="all_types") + + assert all_types_fs_actual == all_types_fs_expected + + if all_types_fs_actual is None: + raise Exception( + "Client cannot retrieve 'all_types_fs' FeatureSet " + "after registration. Either Feast Core does not save the " + "FeatureSet correctly or the client needs to wait longer for FeatureSet " + "to be committed." 
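+            # The all_types ingest and retrieval tests that follow reuse the
+            # poll-until-consistent pattern of the basic tests above: ingestion is
+            # asynchronous, so retrieval loops until the float list round-trips within
+            # FLOAT_TOLERANCE, bounded by the @pytest.mark.timeout decorators.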
+ ) + + +@pytest.mark.timeout(300) +@pytest.mark.run(order=21) +def test_all_types_ingest_success(client, all_types_dataframe): + # Get all_types feature set + all_types_fs = client.get_feature_set(name="all_types") + + # Ingest user embedding data + client.ingest(all_types_fs, dataframe=all_types_dataframe) + + +@pytest.mark.timeout(300) +@pytest.mark.run(order=22) +def test_all_types_retrieve_online_success(client, all_types_dataframe): + + # Poll serving for feature values until the correct values are returned + while True: + time.sleep(1) + + response = client.get_online_features( + entity_rows=[ + GetOnlineFeaturesRequest.EntityRow( + fields={"user_id": Value( + int64_val=all_types_dataframe.iloc[0]["user_id"])} + ) + ], + feature_ids=[ + "all_types:1:float_feature", + "all_types:1:int64_feature", + "all_types:1:int32_feature", + "all_types:1:string_feature", + "all_types:1:bytes_feature", + "all_types:1:bool_feature", + "all_types:1:double_feature", + "all_types:1:float_list_feature", + "all_types:1:int64_list_feature", + "all_types:1:int32_list_feature", + "all_types:1:string_list_feature", + "all_types:1:bytes_list_feature", + "all_types:1:bool_list_feature", + "all_types:1:double_list_feature", + ], + ) # type: GetOnlineFeaturesResponse + + if response is None: + continue + + returned_float_list = ( + response.field_values[0] + .fields["all_types:1:float_list_feature"] + .float_list_val.val + ) + + sent_float_list = all_types_dataframe.iloc[0]["float_list_feature"] + + if math.isclose( + returned_float_list[0], sent_float_list[0], abs_tol=FLOAT_TOLERANCE + ): + break + + +@pytest.fixture(scope='session') +def large_volume_dataframe(): + ROW_COUNT = 100000 + offset = random.randint(1000000, 10000000) # ensure a unique key space + customer_data = pd.DataFrame( + { + "datetime": [ + datetime.utcnow().replace(tzinfo=pytz.utc) for _ in + range(ROW_COUNT) + ], + "customer_id": [offset + inc for inc in range(ROW_COUNT)], + "daily_transactions": [np.random.rand() for _ in range(ROW_COUNT)], + "total_transactions": [256 for _ in range(ROW_COUNT)], + } + ) + return customer_data + + +@pytest.mark.timeout(30) +@pytest.mark.run(order=30) +def test_large_volume_register_feature_set_success(client): + cust_trans_fs_expected = FeatureSet.from_yaml( + "large_volume/cust_trans_large_fs.yaml") + + # Register feature set + client.apply(cust_trans_fs_expected) + + # Feast Core needs some time to fully commit the FeatureSet applied + # when there is no existing job yet for the Featureset + time.sleep(10) + cust_trans_fs_actual = client.get_feature_set(name="customer_transactions_large") + + assert cust_trans_fs_actual == cust_trans_fs_expected + + if cust_trans_fs_actual is None: + raise Exception( + "Client cannot retrieve 'customer_transactions' FeatureSet " + "after registration. Either Feast Core does not save the " + "FeatureSet correctly or the client needs to wait longer for FeatureSet " + "to be committed." 
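+            # The large-volume flow below ingests ROW_COUNT (100000) generated rows and
+            # then spot-checks a single customer's daily_transactions via the same
+            # polling loop, rather than verifying every ingested row.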
+ ) + + +@pytest.mark.timeout(90) +@pytest.mark.run(order=31) +def test_large_volume_ingest_success(client, large_volume_dataframe): + + # Get large volume feature set + cust_trans_fs = client.get_feature_set(name="customer_transactions_large") + + # Ingest customer transaction data + client.ingest(cust_trans_fs, dataframe=large_volume_dataframe) + + +@pytest.mark.timeout(20) +@pytest.mark.run(order=32) +def test_large_volume_retrieve_online_success(client, large_volume_dataframe): + # Poll serving for feature values until the correct values are returned + while True: + time.sleep(1) + + response = client.get_online_features( + entity_rows=[ + GetOnlineFeaturesRequest.EntityRow( + fields={ + "customer_id": Value( + int64_val=large_volume_dataframe.iloc[0][ + "customer_id"] + ) + } + ) + ], + feature_ids=[ + "customer_transactions_large:1:daily_transactions", + "customer_transactions_large:1:total_transactions", + ], + ) # type: GetOnlineFeaturesResponse + + if response is None: + continue + + returned_daily_transactions = float( + response.field_values[0] + .fields["customer_transactions_large:1:daily_transactions"] + .float_val + ) + sent_daily_transactions = float( + large_volume_dataframe.iloc[0]["daily_transactions"]) + + if math.isclose( + sent_daily_transactions, + returned_daily_transactions, + abs_tol=FLOAT_TOLERANCE, + ): + break diff --git a/integration-tests/.gitignore b/tests/integration-tests/.gitignore similarity index 100% rename from integration-tests/.gitignore rename to tests/integration-tests/.gitignore diff --git a/integration-tests/feast-helm-values.yaml.template b/tests/integration-tests/feast-helm-values.yaml.template similarity index 98% rename from integration-tests/feast-helm-values.yaml.template rename to tests/integration-tests/feast-helm-values.yaml.template index a8906e57327..075ac4ec9e0 100644 --- a/integration-tests/feast-helm-values.yaml.template +++ b/tests/integration-tests/feast-helm-values.yaml.template @@ -73,7 +73,7 @@ core: port: 80 targetPort: 8080 jobs: - workspace: "/tmp" + workspace: "gs://feast-templocation-kf-feast" runner: DirectRunner options: "{}" errorStoreType: "stdout" diff --git a/integration-tests/testdata/entity_specs/entity_1.yaml b/tests/integration-tests/testdata/entity_specs/entity_1.yaml similarity index 100% rename from integration-tests/testdata/entity_specs/entity_1.yaml rename to tests/integration-tests/testdata/entity_specs/entity_1.yaml diff --git a/integration-tests/testdata/entity_specs/entity_2.yaml b/tests/integration-tests/testdata/entity_specs/entity_2.yaml similarity index 100% rename from integration-tests/testdata/entity_specs/entity_2.yaml rename to tests/integration-tests/testdata/entity_specs/entity_2.yaml diff --git a/integration-tests/testdata/feature_specs/entity_1.feature_1.yaml b/tests/integration-tests/testdata/feature_specs/entity_1.feature_1.yaml similarity index 100% rename from integration-tests/testdata/feature_specs/entity_1.feature_1.yaml rename to tests/integration-tests/testdata/feature_specs/entity_1.feature_1.yaml diff --git a/integration-tests/testdata/feature_specs/entity_1.feature_2.yaml b/tests/integration-tests/testdata/feature_specs/entity_1.feature_2.yaml similarity index 100% rename from integration-tests/testdata/feature_specs/entity_1.feature_2.yaml rename to tests/integration-tests/testdata/feature_specs/entity_1.feature_2.yaml diff --git a/integration-tests/testdata/feature_specs/entity_1.feature_3.yaml b/tests/integration-tests/testdata/feature_specs/entity_1.feature_3.yaml 
similarity index 100% rename from integration-tests/testdata/feature_specs/entity_1.feature_3.yaml rename to tests/integration-tests/testdata/feature_specs/entity_1.feature_3.yaml diff --git a/integration-tests/testdata/feature_specs/entity_1.feature_4.yaml b/tests/integration-tests/testdata/feature_specs/entity_1.feature_4.yaml similarity index 100% rename from integration-tests/testdata/feature_specs/entity_1.feature_4.yaml rename to tests/integration-tests/testdata/feature_specs/entity_1.feature_4.yaml diff --git a/integration-tests/testdata/feature_specs/entity_2.feature_1.yaml b/tests/integration-tests/testdata/feature_specs/entity_2.feature_1.yaml similarity index 100% rename from integration-tests/testdata/feature_specs/entity_2.feature_1.yaml rename to tests/integration-tests/testdata/feature_specs/entity_2.feature_1.yaml diff --git a/integration-tests/testdata/feature_specs/entity_2.feature_2.yaml b/tests/integration-tests/testdata/feature_specs/entity_2.feature_2.yaml similarity index 100% rename from integration-tests/testdata/feature_specs/entity_2.feature_2.yaml rename to tests/integration-tests/testdata/feature_specs/entity_2.feature_2.yaml diff --git a/integration-tests/testdata/feature_specs/entity_2.feature_3.yaml b/tests/integration-tests/testdata/feature_specs/entity_2.feature_3.yaml similarity index 100% rename from integration-tests/testdata/feature_specs/entity_2.feature_3.yaml rename to tests/integration-tests/testdata/feature_specs/entity_2.feature_3.yaml diff --git a/integration-tests/testdata/feature_specs/entity_2.feature_4.yaml b/tests/integration-tests/testdata/feature_specs/entity_2.feature_4.yaml similarity index 100% rename from integration-tests/testdata/feature_specs/entity_2.feature_4.yaml rename to tests/integration-tests/testdata/feature_specs/entity_2.feature_4.yaml diff --git a/integration-tests/testdata/feature_values/ingestion_1.csv b/tests/integration-tests/testdata/feature_values/ingestion_1.csv similarity index 100% rename from integration-tests/testdata/feature_values/ingestion_1.csv rename to tests/integration-tests/testdata/feature_values/ingestion_1.csv diff --git a/integration-tests/testdata/feature_values/ingestion_2.csv b/tests/integration-tests/testdata/feature_values/ingestion_2.csv similarity index 100% rename from integration-tests/testdata/feature_values/ingestion_2.csv rename to tests/integration-tests/testdata/feature_values/ingestion_2.csv diff --git a/integration-tests/testdata/feature_values/serving_1.csv b/tests/integration-tests/testdata/feature_values/serving_1.csv similarity index 100% rename from integration-tests/testdata/feature_values/serving_1.csv rename to tests/integration-tests/testdata/feature_values/serving_1.csv diff --git a/integration-tests/testdata/feature_values/serving_2.csv b/tests/integration-tests/testdata/feature_values/serving_2.csv similarity index 100% rename from integration-tests/testdata/feature_values/serving_2.csv rename to tests/integration-tests/testdata/feature_values/serving_2.csv diff --git a/integration-tests/testdata/import_specs/batch_from_gcs.yaml.template b/tests/integration-tests/testdata/import_specs/batch_from_gcs.yaml.template similarity index 100% rename from integration-tests/testdata/import_specs/batch_from_gcs.yaml.template rename to tests/integration-tests/testdata/import_specs/batch_from_gcs.yaml.template diff --git a/integration-tests/testdata/import_specs/stream_from_kafka.yaml.template b/tests/integration-tests/testdata/import_specs/stream_from_kafka.yaml.template similarity 
index 100% rename from integration-tests/testdata/import_specs/stream_from_kafka.yaml.template rename to tests/integration-tests/testdata/import_specs/stream_from_kafka.yaml.template diff --git a/integration-tests/testutils/kafka_consumer.py b/tests/integration-tests/testutils/kafka_consumer.py similarity index 100% rename from integration-tests/testutils/kafka_consumer.py rename to tests/integration-tests/testutils/kafka_consumer.py diff --git a/integration-tests/testutils/kafka_producer.py b/tests/integration-tests/testutils/kafka_producer.py similarity index 100% rename from integration-tests/testutils/kafka_producer.py rename to tests/integration-tests/testutils/kafka_producer.py diff --git a/integration-tests/testutils/requirements.txt b/tests/integration-tests/testutils/requirements.txt similarity index 54% rename from integration-tests/testutils/requirements.txt rename to tests/integration-tests/testutils/requirements.txt index a0b3f9c9840..901869c8df6 100644 --- a/integration-tests/testutils/requirements.txt +++ b/tests/integration-tests/testutils/requirements.txt @@ -1,3 +1,4 @@ pandas==0.24.* -numpy==1.15.* +numpy==1.16.* kafka-python==1.4.* +grpcio==1.20.* diff --git a/integration-tests/testutils/spec.py b/tests/integration-tests/testutils/spec.py similarity index 100% rename from integration-tests/testutils/spec.py rename to tests/integration-tests/testutils/spec.py diff --git a/integration-tests/testutils/validate_feature_values.py b/tests/integration-tests/testutils/validate_feature_values.py similarity index 100% rename from integration-tests/testutils/validate_feature_values.py rename to tests/integration-tests/testutils/validate_feature_values.py diff --git a/ui/.env.production b/ui/.env.production deleted file mode 100644 index e3c6cc76b8a..00000000000 --- a/ui/.env.production +++ /dev/null @@ -1 +0,0 @@ -VUE_APP_ROOT_API=/api/ui diff --git a/ui/.gitignore b/ui/.gitignore deleted file mode 100644 index 921789e0c1c..00000000000 --- a/ui/.gitignore +++ /dev/null @@ -1,27 +0,0 @@ -/lib -/lib-legacy -/node_modules -*.log -/.nyc_output -/coverage -/dist -/dist-debug -/artifacts -/updates -/resources/winsetup/generated.wxs -/resources/winsetup/obj -/resources/winsetup/bin -/resources/win-chocolatey/tools/chocolateyinstall.ps1 -.vs -*.msi -*.nupkg -test/fixtures/**/.fbkpm -/tmp/ -/__tests__/fixtures/**/_* -/__tests__/fixtures/request-cache/GET/localhost/.bin -.idea -.yarn-meta -.pnp.js -.pnp -/packages/lockfile/index.js -.vscode/ diff --git a/ui/README.md b/ui/README.md deleted file mode 100644 index c1c24a2719d..00000000000 --- a/ui/README.md +++ /dev/null @@ -1,31 +0,0 @@ -# feast-dashboard - -## Project setup - -1. Install [node](https://nodejs.org/en/download/) -2. Install [yarn](https://yarnpkg.com/lang/en/docs/install/) -3. 
Go to `ui` directory and run: - -``` -yarn install -``` - -## Development - -Compiles and hot-reloads for development: - -``` -yarn run serve -``` - -Compiles and minifies for production: - -``` -yarn run build -``` - -Lints and fixes files: - -``` -yarn run lint -``` diff --git a/ui/babel.config.js b/ui/babel.config.js deleted file mode 100644 index fd1df2a3008..00000000000 --- a/ui/babel.config.js +++ /dev/null @@ -1,5 +0,0 @@ -module.exports = { - presets: [ - '@vue/app' - ] -}; diff --git a/ui/package.json b/ui/package.json deleted file mode 100644 index 6b57245edb1..00000000000 --- a/ui/package.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "name": "feast-dashboard", - "version": "0.1.0", - "private": true, - "scripts": { - "serve": "vue-cli-service serve", - "build": "rm -rf ../core/src/main/resources/static && vue-cli-service build && mv dist ../core/src/main/resources/static", - "lint": "vue-cli-service lint" - }, - "dependencies": { - "clipboard": "^2.0.4", - "json2yaml": "^1.1.0", - "uikit": "^3.1.5", - "vue": "^2.6.10", - "vue-resource": "^1.5.1", - "vue-router": "^3.0.6", - "vue-select": "^2.6.4" - }, - "devDependencies": { - "@vue/cli-plugin-babel": "^3.7.0", - "@vue/cli-plugin-eslint": "^3.7.0", - "@vue/cli-service": "^3.7.0", - "vue-template-compiler": "^2.6.10" - }, - "eslintConfig": { - "root": true, - "env": { - "node": true - }, - "extends": [ - "plugin:vue/essential", - "eslint:recommended" - ], - "rules": {}, - "parserOptions": { - "parser": "babel-eslint" - } - }, - "postcss": { - "plugins": { - "autoprefixer": {} - } - }, - "browserslist": [ - "> 1%", - "last 2 versions", - "not ie <= 8" - ] -} diff --git a/ui/public/favicon.ico b/ui/public/favicon.ico deleted file mode 100644 index c7b9a43c8cd..00000000000 Binary files a/ui/public/favicon.ico and /dev/null differ diff --git a/ui/public/index.html b/ui/public/index.html deleted file mode 100644 index 185eb582d35..00000000000 --- a/ui/public/index.html +++ /dev/null @@ -1,37 +0,0 @@ - - - - - - - - - - - - - feast-dashboard - - - -

- - - diff --git a/ui/src/App.vue b/ui/src/App.vue deleted file mode 100644 index 57f1d7145e4..00000000000 --- a/ui/src/App.vue +++ /dev/null @@ -1,17 +0,0 @@ - - - diff --git a/ui/src/assets/logo.png b/ui/src/assets/logo.png deleted file mode 100644 index f3d2503fc2a..00000000000 Binary files a/ui/src/assets/logo.png and /dev/null differ diff --git a/ui/src/components/Entity/SearchTable.vue b/ui/src/components/Entity/SearchTable.vue deleted file mode 100644 index 5c0847cd08a..00000000000 --- a/ui/src/components/Entity/SearchTable.vue +++ /dev/null @@ -1,266 +0,0 @@ - - - - - \ No newline at end of file diff --git a/ui/src/components/Feature/SearchTable.vue b/ui/src/components/Feature/SearchTable.vue deleted file mode 100644 index b304b719ef5..00000000000 --- a/ui/src/components/Feature/SearchTable.vue +++ /dev/null @@ -1,301 +0,0 @@ - - - - - \ No newline at end of file diff --git a/ui/src/components/Job/SearchTable.vue b/ui/src/components/Job/SearchTable.vue deleted file mode 100644 index ac82c7ae8ec..00000000000 --- a/ui/src/components/Job/SearchTable.vue +++ /dev/null @@ -1,302 +0,0 @@ - - - - - \ No newline at end of file diff --git a/ui/src/components/NavigationBar.vue b/ui/src/components/NavigationBar.vue deleted file mode 100644 index 620767acd1e..00000000000 --- a/ui/src/components/NavigationBar.vue +++ /dev/null @@ -1,27 +0,0 @@ - - - \ No newline at end of file diff --git a/ui/src/components/Storage/SearchTable.vue b/ui/src/components/Storage/SearchTable.vue deleted file mode 100644 index 1aeae3343c8..00000000000 --- a/ui/src/components/Storage/SearchTable.vue +++ /dev/null @@ -1,272 +0,0 @@ - - - - - \ No newline at end of file diff --git a/ui/src/filters.js b/ui/src/filters.js deleted file mode 100644 index f335d77e059..00000000000 --- a/ui/src/filters.js +++ /dev/null @@ -1,7 +0,0 @@ -export default { - formatTimestamp (timestamp) { - let datetime = new Date(timestamp); - return datetime.toLocaleTimeString('en-US'); - } -} - diff --git a/ui/src/main.js b/ui/src/main.js deleted file mode 100644 index b2c657568b4..00000000000 --- a/ui/src/main.js +++ /dev/null @@ -1,17 +0,0 @@ -import Vue from 'vue' -import VueResource from 'vue-resource' -import App from './App.vue' -import router from './router' -import UIkit from 'uikit'; -import Icons from 'uikit/dist/js/uikit-icons'; - - -Vue.config.productionTip = false; -UIkit.use(Icons); - -Vue.use(VueResource); - -new Vue({ - router, - render: h => h(App) -}).$mount('#app'); diff --git a/ui/src/router.js b/ui/src/router.js deleted file mode 100644 index 9d7fec8d6bf..00000000000 --- a/ui/src/router.js +++ /dev/null @@ -1,54 +0,0 @@ -import Vue from 'vue' -import Router from 'vue-router' - -import FeatureList from './views/Feature/List.vue' -import FeatureDetails from './views/Feature/Details.vue' -import EntityList from './views/Entity/List.vue' -import StorageList from './views/Storage/List.vue' - -Vue.use(Router); - -export default new Router({ - routes: [ - { - path: '/', - name: 'home', - component: FeatureList - }, - { - path: '/features', - name: 'features', - component: FeatureList - }, - { - path: '/features/:entity', - name: 'features-by-entity', - component: FeatureList - }, - { - path: '/feature/:id', - name: 'feature-details', - component: FeatureDetails - }, - { - path: '/entities', - name: 'entities', - component: EntityList - }, - { - path: '/entity/:name', - name: 'entity-details', - component: EntityList - }, - { - path: '/storage', - name: 'storage', - component: StorageList - }, - { - path: '/storage/:id', - name: 
'storage-details', - component: StorageList - }, - ] -}) diff --git a/ui/src/views/Entity/List.vue b/ui/src/views/Entity/List.vue deleted file mode 100644 index 71bac936831..00000000000 --- a/ui/src/views/Entity/List.vue +++ /dev/null @@ -1,26 +0,0 @@ - - - - - diff --git a/ui/src/views/Feature/Details.vue b/ui/src/views/Feature/Details.vue deleted file mode 100644 index 9dddd58ff9f..00000000000 --- a/ui/src/views/Feature/Details.vue +++ /dev/null @@ -1,218 +0,0 @@ - - - - diff --git a/ui/src/views/Feature/List.vue b/ui/src/views/Feature/List.vue deleted file mode 100644 index 708a019ee3c..00000000000 --- a/ui/src/views/Feature/List.vue +++ /dev/null @@ -1,26 +0,0 @@ - - - - - diff --git a/ui/src/views/Storage/List.vue b/ui/src/views/Storage/List.vue deleted file mode 100644 index d6e805eba5d..00000000000 --- a/ui/src/views/Storage/List.vue +++ /dev/null @@ -1,26 +0,0 @@ - - - - - diff --git a/ui/vue.config.js b/ui/vue.config.js deleted file mode 100644 index 0bd43805c0b..00000000000 --- a/ui/vue.config.js +++ /dev/null @@ -1,3 +0,0 @@ -module.exports = { - publicPath: './' -} \ No newline at end of file diff --git a/ui/yarn.lock b/ui/yarn.lock deleted file mode 100644 index 262a152ae1c..00000000000 --- a/ui/yarn.lock +++ /dev/null @@ -1,8156 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. -# yarn lockfile v1 - - -"@babel/code-frame@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.0.0.tgz#06e2ab19bdb535385559aabb5ba59729482800f8" - integrity sha512-OfC2uemaknXr87bdLUkWog7nYuliM9Ij5HUcajsVcMCpQrcLmtxRbVFTIqmcSkSeYRBFBRxs2FiUqFJDLdiebA== - dependencies: - "@babel/highlight" "^7.0.0" - -"@babel/core@^7.0.0": - version "7.4.5" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.4.5.tgz#081f97e8ffca65a9b4b0fdc7e274e703f000c06a" - integrity sha512-OvjIh6aqXtlsA8ujtGKfC7LYWksYSX8yQcM8Ay3LuvVeQ63lcOKgoZWVqcpFwkd29aYU9rVx7jxhfhiEDV9MZA== - dependencies: - "@babel/code-frame" "^7.0.0" - "@babel/generator" "^7.4.4" - "@babel/helpers" "^7.4.4" - "@babel/parser" "^7.4.5" - "@babel/template" "^7.4.4" - "@babel/traverse" "^7.4.5" - "@babel/types" "^7.4.4" - convert-source-map "^1.1.0" - debug "^4.1.0" - json5 "^2.1.0" - lodash "^4.17.11" - resolve "^1.3.2" - semver "^5.4.1" - source-map "^0.5.0" - -"@babel/generator@^7.4.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.4.4.tgz#174a215eb843fc392c7edcaabeaa873de6e8f041" - integrity sha512-53UOLK6TVNqKxf7RUh8NE851EHRxOOeVXKbK2bivdb+iziMyk03Sr4eaE9OELCbyZAAafAKPDwF2TPUES5QbxQ== - dependencies: - "@babel/types" "^7.4.4" - jsesc "^2.5.1" - lodash "^4.17.11" - source-map "^0.5.0" - trim-right "^1.0.1" - -"@babel/helper-annotate-as-pure@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.0.0.tgz#323d39dd0b50e10c7c06ca7d7638e6864d8c5c32" - integrity sha512-3UYcJUj9kvSLbLbUIfQTqzcy5VX7GRZ/CCDrnOaZorFFM01aXp1+GJwuFGV4NDDoAS+mOUyHcO6UD/RfqOks3Q== - dependencies: - "@babel/types" "^7.0.0" - -"@babel/helper-builder-binary-assignment-operator-visitor@^7.1.0": - version "7.1.0" - resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.1.0.tgz#6b69628dfe4087798e0c4ed98e3d4a6b2fbd2f5f" - integrity sha512-qNSR4jrmJ8M1VMM9tibvyRAHXQs2PmaksQF7c1CGJNipfe3D8p+wgNwgso/P2A2r2mdgBWAXljNWR0QRZAMW8w== - dependencies: - "@babel/helper-explode-assignable-expression" "^7.1.0" - 
"@babel/types" "^7.0.0" - -"@babel/helper-call-delegate@^7.4.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/helper-call-delegate/-/helper-call-delegate-7.4.4.tgz#87c1f8ca19ad552a736a7a27b1c1fcf8b1ff1f43" - integrity sha512-l79boDFJ8S1c5hvQvG+rc+wHw6IuH7YldmRKsYtpbawsxURu/paVy57FZMomGK22/JckepaikOkY0MoAmdyOlQ== - dependencies: - "@babel/helper-hoist-variables" "^7.4.4" - "@babel/traverse" "^7.4.4" - "@babel/types" "^7.4.4" - -"@babel/helper-create-class-features-plugin@^7.4.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.4.4.tgz#fc3d690af6554cc9efc607364a82d48f58736dba" - integrity sha512-UbBHIa2qeAGgyiNR9RszVF7bUHEdgS4JAUNT8SiqrAN6YJVxlOxeLr5pBzb5kan302dejJ9nla4RyKcR1XT6XA== - dependencies: - "@babel/helper-function-name" "^7.1.0" - "@babel/helper-member-expression-to-functions" "^7.0.0" - "@babel/helper-optimise-call-expression" "^7.0.0" - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/helper-replace-supers" "^7.4.4" - "@babel/helper-split-export-declaration" "^7.4.4" - -"@babel/helper-define-map@^7.4.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/helper-define-map/-/helper-define-map-7.4.4.tgz#6969d1f570b46bdc900d1eba8e5d59c48ba2c12a" - integrity sha512-IX3Ln8gLhZpSuqHJSnTNBWGDE9kdkTEWl21A/K7PQ00tseBwbqCHTvNLHSBd9M0R5rER4h5Rsvj9vw0R5SieBg== - dependencies: - "@babel/helper-function-name" "^7.1.0" - "@babel/types" "^7.4.4" - lodash "^4.17.11" - -"@babel/helper-explode-assignable-expression@^7.1.0": - version "7.1.0" - resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.1.0.tgz#537fa13f6f1674df745b0c00ec8fe4e99681c8f6" - integrity sha512-NRQpfHrJ1msCHtKjbzs9YcMmJZOg6mQMmGRB+hbamEdG5PNpaSm95275VD92DvJKuyl0s2sFiDmMZ+EnnvufqA== - dependencies: - "@babel/traverse" "^7.1.0" - "@babel/types" "^7.0.0" - -"@babel/helper-function-name@^7.1.0": - version "7.1.0" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.1.0.tgz#a0ceb01685f73355d4360c1247f582bfafc8ff53" - integrity sha512-A95XEoCpb3TO+KZzJ4S/5uW5fNe26DjBGqf1o9ucyLyCmi1dXq/B3c8iaWTfBk3VvetUxl16e8tIrd5teOCfGw== - dependencies: - "@babel/helper-get-function-arity" "^7.0.0" - "@babel/template" "^7.1.0" - "@babel/types" "^7.0.0" - -"@babel/helper-get-function-arity@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.0.0.tgz#83572d4320e2a4657263734113c42868b64e49c3" - integrity sha512-r2DbJeg4svYvt3HOS74U4eWKsUAMRH01Z1ds1zx8KNTPtpTL5JAsdFv8BNyOpVqdFhHkkRDIg5B4AsxmkjAlmQ== - dependencies: - "@babel/types" "^7.0.0" - -"@babel/helper-hoist-variables@^7.4.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.4.4.tgz#0298b5f25c8c09c53102d52ac4a98f773eb2850a" - integrity sha512-VYk2/H/BnYbZDDg39hr3t2kKyifAm1W6zHRfhx8jGjIHpQEBv9dry7oQ2f3+J703TLu69nYdxsovl0XYfcnK4w== - dependencies: - "@babel/types" "^7.4.4" - -"@babel/helper-member-expression-to-functions@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.0.0.tgz#8cd14b0a0df7ff00f009e7d7a436945f47c7a16f" - integrity sha512-avo+lm/QmZlv27Zsi0xEor2fKcqWG56D5ae9dzklpIaY7cQMK5N8VSpaNVPPagiqmy7LrEjK1IWdGMOqPu5csg== - dependencies: - "@babel/types" "^7.0.0" - -"@babel/helper-module-imports@^7.0.0": - version 
"7.0.0" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.0.0.tgz#96081b7111e486da4d2cd971ad1a4fe216cc2e3d" - integrity sha512-aP/hlLq01DWNEiDg4Jn23i+CXxW/owM4WpDLFUbpjxe4NS3BhLVZQ5i7E0ZrxuQ/vwekIeciyamgB1UIYxxM6A== - dependencies: - "@babel/types" "^7.0.0" - -"@babel/helper-module-transforms@^7.1.0", "@babel/helper-module-transforms@^7.4.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.4.4.tgz#96115ea42a2f139e619e98ed46df6019b94414b8" - integrity sha512-3Z1yp8TVQf+B4ynN7WoHPKS8EkdTbgAEy0nU0rs/1Kw4pDgmvYH3rz3aI11KgxKCba2cn7N+tqzV1mY2HMN96w== - dependencies: - "@babel/helper-module-imports" "^7.0.0" - "@babel/helper-simple-access" "^7.1.0" - "@babel/helper-split-export-declaration" "^7.4.4" - "@babel/template" "^7.4.4" - "@babel/types" "^7.4.4" - lodash "^4.17.11" - -"@babel/helper-optimise-call-expression@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.0.0.tgz#a2920c5702b073c15de51106200aa8cad20497d5" - integrity sha512-u8nd9NQePYNQV8iPWu/pLLYBqZBa4ZaY1YWRFMuxrid94wKI1QNt67NEZ7GAe5Kc/0LLScbim05xZFWkAdrj9g== - dependencies: - "@babel/types" "^7.0.0" - -"@babel/helper-plugin-utils@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.0.0.tgz#bbb3fbee98661c569034237cc03967ba99b4f250" - integrity sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA== - -"@babel/helper-regex@^7.0.0", "@babel/helper-regex@^7.4.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.4.4.tgz#a47e02bc91fb259d2e6727c2a30013e3ac13c4a2" - integrity sha512-Y5nuB/kESmR3tKjU8Nkn1wMGEx1tjJX076HBMeL3XLQCu6vA/YRzuTW0bbb+qRnXvQGn+d6Rx953yffl8vEy7Q== - dependencies: - lodash "^4.17.11" - -"@babel/helper-remap-async-to-generator@^7.1.0": - version "7.1.0" - resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.1.0.tgz#361d80821b6f38da75bd3f0785ece20a88c5fe7f" - integrity sha512-3fOK0L+Fdlg8S5al8u/hWE6vhufGSn0bN09xm2LXMy//REAF8kDCrYoOBKYmA8m5Nom+sV9LyLCwrFynA8/slg== - dependencies: - "@babel/helper-annotate-as-pure" "^7.0.0" - "@babel/helper-wrap-function" "^7.1.0" - "@babel/template" "^7.1.0" - "@babel/traverse" "^7.1.0" - "@babel/types" "^7.0.0" - -"@babel/helper-replace-supers@^7.1.0", "@babel/helper-replace-supers@^7.4.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.4.4.tgz#aee41783ebe4f2d3ab3ae775e1cc6f1a90cefa27" - integrity sha512-04xGEnd+s01nY1l15EuMS1rfKktNF+1CkKmHoErDppjAAZL+IUBZpzT748x262HF7fibaQPhbvWUl5HeSt1EXg== - dependencies: - "@babel/helper-member-expression-to-functions" "^7.0.0" - "@babel/helper-optimise-call-expression" "^7.0.0" - "@babel/traverse" "^7.4.4" - "@babel/types" "^7.4.4" - -"@babel/helper-simple-access@^7.1.0": - version "7.1.0" - resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.1.0.tgz#65eeb954c8c245beaa4e859da6188f39d71e585c" - integrity sha512-Vk+78hNjRbsiu49zAPALxTb+JUQCz1aolpd8osOF16BGnLtseD21nbHgLPGUwrXEurZgiCOUmvs3ExTu4F5x6w== - dependencies: - "@babel/template" "^7.1.0" - "@babel/types" "^7.0.0" - -"@babel/helper-split-export-declaration@^7.4.4": - version "7.4.4" - resolved 
"https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.4.4.tgz#ff94894a340be78f53f06af038b205c49d993677" - integrity sha512-Ro/XkzLf3JFITkW6b+hNxzZ1n5OQ80NvIUdmHspih1XAhtN3vPTuUFT4eQnela+2MaZ5ulH+iyP513KJrxbN7Q== - dependencies: - "@babel/types" "^7.4.4" - -"@babel/helper-wrap-function@^7.1.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.2.0.tgz#c4e0012445769e2815b55296ead43a958549f6fa" - integrity sha512-o9fP1BZLLSrYlxYEYyl2aS+Flun5gtjTIG8iln+XuEzQTs0PLagAGSXUcqruJwD5fM48jzIEggCKpIfWTcR7pQ== - dependencies: - "@babel/helper-function-name" "^7.1.0" - "@babel/template" "^7.1.0" - "@babel/traverse" "^7.1.0" - "@babel/types" "^7.2.0" - -"@babel/helpers@^7.4.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.4.4.tgz#868b0ef59c1dd4e78744562d5ce1b59c89f2f2a5" - integrity sha512-igczbR/0SeuPR8RFfC7tGrbdTbFL3QTvH6D+Z6zNxnTe//GyqmtHmDkzrqDmyZ3eSwPqB/LhyKoU5DXsp+Vp2A== - dependencies: - "@babel/template" "^7.4.4" - "@babel/traverse" "^7.4.4" - "@babel/types" "^7.4.4" - -"@babel/highlight@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.0.0.tgz#f710c38c8d458e6dd9a201afb637fcb781ce99e4" - integrity sha512-UFMC4ZeFC48Tpvj7C8UgLvtkaUuovQX+5xNWrsIoMG8o2z+XFKjKaN9iVmS84dPwVN00W4wPmqvYoZF3EGAsfw== - dependencies: - chalk "^2.0.0" - esutils "^2.0.2" - js-tokens "^4.0.0" - -"@babel/parser@^7.0.0", "@babel/parser@^7.4.4", "@babel/parser@^7.4.5": - version "7.4.5" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.4.5.tgz#04af8d5d5a2b044a2a1bffacc1e5e6673544e872" - integrity sha512-9mUqkL1FF5T7f0WDFfAoDdiMVPWsdD1gZYzSnaXsxUCUqzuch/8of9G3VUSNiZmMBoRxT3neyVsqeiL/ZPcjew== - -"@babel/plugin-proposal-async-generator-functions@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.2.0.tgz#b289b306669dce4ad20b0252889a15768c9d417e" - integrity sha512-+Dfo/SCQqrwx48ptLVGLdE39YtWRuKc/Y9I5Fy0P1DDBB9lsAHpjcEJQt+4IifuSOSTLBKJObJqMvaO1pIE8LQ== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/helper-remap-async-to-generator" "^7.1.0" - "@babel/plugin-syntax-async-generators" "^7.2.0" - -"@babel/plugin-proposal-class-properties@^7.0.0": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.4.4.tgz#93a6486eed86d53452ab9bab35e368e9461198ce" - integrity sha512-WjKTI8g8d5w1Bc9zgwSz2nfrsNQsXcCf9J9cdCvrJV6RF56yztwm4TmJC0MgJ9tvwO9gUA/mcYe89bLdGfiXFg== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.4.4" - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-proposal-decorators@^7.1.0": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-decorators/-/plugin-proposal-decorators-7.4.4.tgz#de9b2a1a8ab0196f378e2a82f10b6e2a36f21cc0" - integrity sha512-z7MpQz3XC/iQJWXH9y+MaWcLPNSMY9RQSthrLzak8R8hCj0fuyNk+Dzi9kfNe/JxxlWQ2g7wkABbgWjW36MTcw== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.4.4" - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/plugin-syntax-decorators" "^7.2.0" - -"@babel/plugin-proposal-json-strings@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.2.0.tgz#568ecc446c6148ae6b267f02551130891e29f317" - integrity 
sha512-MAFV1CA/YVmYwZG0fBQyXhmj0BHCB5egZHCKWIFVv/XCxAeVGIHfos3SwDck4LvCllENIAg7xMKOG5kH0dzyUg== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/plugin-syntax-json-strings" "^7.2.0" - -"@babel/plugin-proposal-object-rest-spread@^7.3.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.4.4.tgz#1ef173fcf24b3e2df92a678f027673b55e7e3005" - integrity sha512-dMBG6cSPBbHeEBdFXeQ2QLc5gUpg4Vkaz8octD4aoW/ISO+jBOcsuxYL7bsb5WSu8RLP6boxrBIALEHgoHtO9g== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/plugin-syntax-object-rest-spread" "^7.2.0" - -"@babel/plugin-proposal-optional-catch-binding@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.2.0.tgz#135d81edb68a081e55e56ec48541ece8065c38f5" - integrity sha512-mgYj3jCcxug6KUcX4OBoOJz3CMrwRfQELPQ5560F70YQUBZB7uac9fqaWamKR1iWUzGiK2t0ygzjTScZnVz75g== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/plugin-syntax-optional-catch-binding" "^7.2.0" - -"@babel/plugin-proposal-unicode-property-regex@^7.2.0": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.4.4.tgz#501ffd9826c0b91da22690720722ac7cb1ca9c78" - integrity sha512-j1NwnOqMG9mFUOH58JTFsA/+ZYzQLUZ/drqWUqxCYLGeu2JFZL8YrNC9hBxKmWtAuOCHPcRpgv7fhap09Fb4kA== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/helper-regex" "^7.4.4" - regexpu-core "^4.5.4" - -"@babel/plugin-syntax-async-generators@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.2.0.tgz#69e1f0db34c6f5a0cf7e2b3323bf159a76c8cb7f" - integrity sha512-1ZrIRBv2t0GSlcwVoQ6VgSLpLgiN/FVQUzt9znxo7v2Ov4jJrs8RY8tv0wvDmFN3qIdMKWrmMMW6yZ0G19MfGg== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-syntax-decorators@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-decorators/-/plugin-syntax-decorators-7.2.0.tgz#c50b1b957dcc69e4b1127b65e1c33eef61570c1b" - integrity sha512-38QdqVoXdHUQfTpZo3rQwqQdWtCn5tMv4uV6r2RMfTqNBuv4ZBhz79SfaQWKTVmxHjeFv/DnXVC/+agHCklYWA== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-syntax-dynamic-import@^7.0.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.2.0.tgz#69c159ffaf4998122161ad8ebc5e6d1f55df8612" - integrity sha512-mVxuJ0YroI/h/tbFTPGZR8cv6ai+STMKNBq0f8hFxsxWjl94qqhsb+wXbpNMDPU3cfR1TIsVFzU3nXyZMqyK4w== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-syntax-json-strings@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.2.0.tgz#72bd13f6ffe1d25938129d2a186b11fd62951470" - integrity sha512-5UGYnMSLRE1dqqZwug+1LISpA403HzlSfsg6P9VXU6TBjcSHeNlw4DxDx7LgpF+iKZoOG/+uzqoRHTdcUpiZNg== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-syntax-jsx@^7.0.0", "@babel/plugin-syntax-jsx@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.2.0.tgz#0b85a3b4bc7cdf4cc4b8bf236335b907ca22e7c7" - integrity sha512-VyN4QANJkRW6lDBmENzRszvZf3/4AXaj9YR7GwrWeeN9tEBPuXbmDYVU9bYBN0D70zCWVwUy0HWq2553VCb6Hw== - dependencies: - "@babel/helper-plugin-utils" 
"^7.0.0" - -"@babel/plugin-syntax-object-rest-spread@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.2.0.tgz#3b7a3e733510c57e820b9142a6579ac8b0dfad2e" - integrity sha512-t0JKGgqk2We+9may3t0xDdmneaXmyxq0xieYcKHxIsrJO64n1OiMWNUtc5gQK1PA0NpdCRrtZp4z+IUaKugrSA== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-syntax-optional-catch-binding@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.2.0.tgz#a94013d6eda8908dfe6a477e7f9eda85656ecf5c" - integrity sha512-bDe4xKNhb0LI7IvZHiA13kff0KEfaGX/Hv4lMA9+7TEc63hMNvfKo6ZFpXhKuEp+II/q35Gc4NoMeDZyaUbj9w== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-transform-arrow-functions@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.2.0.tgz#9aeafbe4d6ffc6563bf8f8372091628f00779550" - integrity sha512-ER77Cax1+8/8jCB9fo4Ud161OZzWN5qawi4GusDuRLcDbDG+bIGYY20zb2dfAFdTRGzrfq2xZPvF0R64EHnimg== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-transform-async-to-generator@^7.3.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.4.4.tgz#a3f1d01f2f21cadab20b33a82133116f14fb5894" - integrity sha512-YiqW2Li8TXmzgbXw+STsSqPBPFnGviiaSp6CYOq55X8GQ2SGVLrXB6pNid8HkqkZAzOH6knbai3snhP7v0fNwA== - dependencies: - "@babel/helper-module-imports" "^7.0.0" - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/helper-remap-async-to-generator" "^7.1.0" - -"@babel/plugin-transform-block-scoped-functions@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.2.0.tgz#5d3cc11e8d5ddd752aa64c9148d0db6cb79fd190" - integrity sha512-ntQPR6q1/NKuphly49+QiQiTN0O63uOwjdD6dhIjSWBI5xlrbUFh720TIpzBhpnrLfv2tNH/BXvLIab1+BAI0w== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-transform-block-scoping@^7.3.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.4.4.tgz#c13279fabf6b916661531841a23c4b7dae29646d" - integrity sha512-jkTUyWZcTrwxu5DD4rWz6rDB5Cjdmgz6z7M7RLXOJyCUkFBawssDGcGh8M/0FTSB87avyJI1HsTwUXp9nKA1PA== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - lodash "^4.17.11" - -"@babel/plugin-transform-classes@^7.3.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.4.4.tgz#0ce4094cdafd709721076d3b9c38ad31ca715eb6" - integrity sha512-/e44eFLImEGIpL9qPxSRat13I5QNRgBLu2hOQJCF7VLy/otSM/sypV1+XaIw5+502RX/+6YaSAPmldk+nhHDPw== - dependencies: - "@babel/helper-annotate-as-pure" "^7.0.0" - "@babel/helper-define-map" "^7.4.4" - "@babel/helper-function-name" "^7.1.0" - "@babel/helper-optimise-call-expression" "^7.0.0" - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/helper-replace-supers" "^7.4.4" - "@babel/helper-split-export-declaration" "^7.4.4" - globals "^11.1.0" - -"@babel/plugin-transform-computed-properties@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.2.0.tgz#83a7df6a658865b1c8f641d510c6f3af220216da" - integrity 
sha512-kP/drqTxY6Xt3NNpKiMomfgkNn4o7+vKxK2DDKcBG9sHj51vHqMBGy8wbDS/J4lMxnqs153/T3+DmCEAkC5cpA== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-transform-destructuring@^7.2.0": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.4.4.tgz#9d964717829cc9e4b601fc82a26a71a4d8faf20f" - integrity sha512-/aOx+nW0w8eHiEHm+BTERB2oJn5D127iye/SUQl7NjHy0lf+j7h4MKMMSOwdazGq9OxgiNADncE+SRJkCxjZpQ== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-transform-dotall-regex@^7.2.0": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.4.4.tgz#361a148bc951444312c69446d76ed1ea8e4450c3" - integrity sha512-P05YEhRc2h53lZDjRPk/OektxCVevFzZs2Gfjd545Wde3k+yFDbXORgl2e0xpbq8mLcKJ7Idss4fAg0zORN/zg== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/helper-regex" "^7.4.4" - regexpu-core "^4.5.4" - -"@babel/plugin-transform-duplicate-keys@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.2.0.tgz#d952c4930f312a4dbfff18f0b2914e60c35530b3" - integrity sha512-q+yuxW4DsTjNceUiTzK0L+AfQ0zD9rWaTLiUqHA8p0gxx7lu1EylenfzjeIWNkPy6e/0VG/Wjw9uf9LueQwLOw== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-transform-exponentiation-operator@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.2.0.tgz#a63868289e5b4007f7054d46491af51435766008" - integrity sha512-umh4hR6N7mu4Elq9GG8TOu9M0bakvlsREEC+ialrQN6ABS4oDQ69qJv1VtR3uxlKMCQMCvzk7vr17RHKcjx68A== - dependencies: - "@babel/helper-builder-binary-assignment-operator-visitor" "^7.1.0" - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-transform-for-of@^7.2.0": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.4.4.tgz#0267fc735e24c808ba173866c6c4d1440fc3c556" - integrity sha512-9T/5Dlr14Z9TIEXLXkt8T1DU7F24cbhwhMNUziN3hB1AXoZcdzPcTiKGRn/6iOymDqtTKWnr/BtRKN9JwbKtdQ== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-transform-function-name@^7.2.0": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.4.4.tgz#e1436116abb0610c2259094848754ac5230922ad" - integrity sha512-iU9pv7U+2jC9ANQkKeNF6DrPy4GBa4NWQtl6dHB4Pb3izX2JOEvDTFarlNsBj/63ZEzNNIAMs3Qw4fNCcSOXJA== - dependencies: - "@babel/helper-function-name" "^7.1.0" - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-transform-literals@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.2.0.tgz#690353e81f9267dad4fd8cfd77eafa86aba53ea1" - integrity sha512-2ThDhm4lI4oV7fVQ6pNNK+sx+c/GM5/SaML0w/r4ZB7sAneD/piDJtwdKlNckXeyGK7wlwg2E2w33C/Hh+VFCg== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-transform-modules-amd@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.2.0.tgz#82a9bce45b95441f617a24011dc89d12da7f4ee6" - integrity sha512-mK2A8ucqz1qhrdqjS9VMIDfIvvT2thrEsIQzbaTdc5QFzhDjQv2CkJJ5f6BXIkgbmaoax3zBr2RyvV/8zeoUZw== - dependencies: - "@babel/helper-module-transforms" "^7.1.0" - "@babel/helper-plugin-utils" "^7.0.0" - 
-"@babel/plugin-transform-modules-commonjs@^7.2.0": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.4.4.tgz#0bef4713d30f1d78c2e59b3d6db40e60192cac1e" - integrity sha512-4sfBOJt58sEo9a2BQXnZq+Q3ZTSAUXyK3E30o36BOGnJ+tvJ6YSxF0PG6kERvbeISgProodWuI9UVG3/FMY6iw== - dependencies: - "@babel/helper-module-transforms" "^7.4.4" - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/helper-simple-access" "^7.1.0" - -"@babel/plugin-transform-modules-systemjs@^7.3.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.4.4.tgz#dc83c5665b07d6c2a7b224c00ac63659ea36a405" - integrity sha512-MSiModfILQc3/oqnG7NrP1jHaSPryO6tA2kOMmAQApz5dayPxWiHqmq4sWH2xF5LcQK56LlbKByCd8Aah/OIkQ== - dependencies: - "@babel/helper-hoist-variables" "^7.4.4" - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-transform-modules-umd@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.2.0.tgz#7678ce75169f0877b8eb2235538c074268dd01ae" - integrity sha512-BV3bw6MyUH1iIsGhXlOK6sXhmSarZjtJ/vMiD9dNmpY8QXFFQTj+6v92pcfy1iqa8DeAfJFwoxcrS/TUZda6sw== - dependencies: - "@babel/helper-module-transforms" "^7.1.0" - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-transform-named-capturing-groups-regex@^7.3.0": - version "7.4.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.4.5.tgz#9d269fd28a370258199b4294736813a60bbdd106" - integrity sha512-z7+2IsWafTBbjNsOxU/Iv5CvTJlr5w4+HGu1HovKYTtgJ362f7kBcQglkfmlspKKZ3bgrbSGvLfNx++ZJgCWsg== - dependencies: - regexp-tree "^0.1.6" - -"@babel/plugin-transform-new-target@^7.0.0": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.4.4.tgz#18d120438b0cc9ee95a47f2c72bc9768fbed60a5" - integrity sha512-r1z3T2DNGQwwe2vPGZMBNjioT2scgWzK9BCnDEh+46z8EEwXBq24uRzd65I7pjtugzPSj921aM15RpESgzsSuA== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-transform-object-super@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.2.0.tgz#b35d4c10f56bab5d650047dad0f1d8e8814b6598" - integrity sha512-VMyhPYZISFZAqAPVkiYb7dUe2AsVi2/wCT5+wZdsNO31FojQJa9ns40hzZ6U9f50Jlq4w6qwzdBB2uwqZ00ebg== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/helper-replace-supers" "^7.1.0" - -"@babel/plugin-transform-parameters@^7.2.0": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.4.4.tgz#7556cf03f318bd2719fe4c922d2d808be5571e16" - integrity sha512-oMh5DUO1V63nZcu/ZVLQFqiihBGo4OpxJxR1otF50GMeCLiRx5nUdtokd+u9SuVJrvvuIh9OosRFPP4pIPnwmw== - dependencies: - "@babel/helper-call-delegate" "^7.4.4" - "@babel/helper-get-function-arity" "^7.0.0" - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-transform-regenerator@^7.3.4": - version "7.4.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.4.5.tgz#629dc82512c55cee01341fb27bdfcb210354680f" - integrity sha512-gBKRh5qAaCWntnd09S8QC7r3auLCqq5DI6O0DlfoyDjslSBVqBibrMdsqO+Uhmx3+BlOmE/Kw1HFxmGbv0N9dA== - dependencies: - regenerator-transform "^0.14.0" - -"@babel/plugin-transform-runtime@^7.4.0": - version "7.4.4" - 
resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.4.4.tgz#a50f5d16e9c3a4ac18a1a9f9803c107c380bce08" - integrity sha512-aMVojEjPszvau3NRg+TIH14ynZLvPewH4xhlCW1w6A3rkxTS1m4uwzRclYR9oS+rl/dr+kT+pzbfHuAWP/lc7Q== - dependencies: - "@babel/helper-module-imports" "^7.0.0" - "@babel/helper-plugin-utils" "^7.0.0" - resolve "^1.8.1" - semver "^5.5.1" - -"@babel/plugin-transform-shorthand-properties@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.2.0.tgz#6333aee2f8d6ee7e28615457298934a3b46198f0" - integrity sha512-QP4eUM83ha9zmYtpbnyjTLAGKQritA5XW/iG9cjtuOI8s1RuL/3V6a3DeSHfKutJQ+ayUfeZJPcnCYEQzaPQqg== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-transform-spread@^7.2.0": - version "7.2.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.2.2.tgz#3103a9abe22f742b6d406ecd3cd49b774919b406" - integrity sha512-KWfky/58vubwtS0hLqEnrWJjsMGaOeSBn90Ezn5Jeg9Z8KKHmELbP1yGylMlm5N6TPKeY9A2+UaSYLdxahg01w== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-transform-sticky-regex@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.2.0.tgz#a1e454b5995560a9c1e0d537dfc15061fd2687e1" - integrity sha512-KKYCoGaRAf+ckH8gEL3JHUaFVyNHKe3ASNsZ+AlktgHevvxGigoIttrEJb8iKN03Q7Eazlv1s6cx2B2cQ3Jabw== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/helper-regex" "^7.0.0" - -"@babel/plugin-transform-template-literals@^7.2.0": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.4.4.tgz#9d28fea7bbce637fb7612a0750989d8321d4bcb0" - integrity sha512-mQrEC4TWkhLN0z8ygIvEL9ZEToPhG5K7KDW3pzGqOfIGZ28Jb0POUkeWcoz8HnHvhFy6dwAT1j8OzqN8s804+g== - dependencies: - "@babel/helper-annotate-as-pure" "^7.0.0" - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-transform-typeof-symbol@^7.2.0": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.2.0.tgz#117d2bcec2fbf64b4b59d1f9819894682d29f2b2" - integrity sha512-2LNhETWYxiYysBtrBTqL8+La0jIoQQnIScUJc74OYvUGRmkskNY4EzLCnjHBzdmb38wqtTaixpo1NctEcvMDZw== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - -"@babel/plugin-transform-unicode-regex@^7.2.0": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.4.4.tgz#ab4634bb4f14d36728bf5978322b35587787970f" - integrity sha512-il+/XdNw01i93+M9J9u4T7/e/Ue/vWfNZE4IRUQjplu2Mqb/AFTDimkw2tdEdSH50wuQXZAbXSql0UphQke+vA== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/helper-regex" "^7.4.4" - regexpu-core "^4.5.4" - -"@babel/preset-env@^7.0.0 < 7.4.0": - version "7.3.4" - resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.3.4.tgz#887cf38b6d23c82f19b5135298bdb160062e33e1" - integrity sha512-2mwqfYMK8weA0g0uBKOt4FE3iEodiHy9/CW0b+nWXcbL+pGzLx8ESYc+j9IIxr6LTDHWKgPm71i9smo02bw+gA== - dependencies: - "@babel/helper-module-imports" "^7.0.0" - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/plugin-proposal-async-generator-functions" "^7.2.0" - "@babel/plugin-proposal-json-strings" "^7.2.0" - "@babel/plugin-proposal-object-rest-spread" "^7.3.4" - "@babel/plugin-proposal-optional-catch-binding" "^7.2.0" - 
"@babel/plugin-proposal-unicode-property-regex" "^7.2.0" - "@babel/plugin-syntax-async-generators" "^7.2.0" - "@babel/plugin-syntax-json-strings" "^7.2.0" - "@babel/plugin-syntax-object-rest-spread" "^7.2.0" - "@babel/plugin-syntax-optional-catch-binding" "^7.2.0" - "@babel/plugin-transform-arrow-functions" "^7.2.0" - "@babel/plugin-transform-async-to-generator" "^7.3.4" - "@babel/plugin-transform-block-scoped-functions" "^7.2.0" - "@babel/plugin-transform-block-scoping" "^7.3.4" - "@babel/plugin-transform-classes" "^7.3.4" - "@babel/plugin-transform-computed-properties" "^7.2.0" - "@babel/plugin-transform-destructuring" "^7.2.0" - "@babel/plugin-transform-dotall-regex" "^7.2.0" - "@babel/plugin-transform-duplicate-keys" "^7.2.0" - "@babel/plugin-transform-exponentiation-operator" "^7.2.0" - "@babel/plugin-transform-for-of" "^7.2.0" - "@babel/plugin-transform-function-name" "^7.2.0" - "@babel/plugin-transform-literals" "^7.2.0" - "@babel/plugin-transform-modules-amd" "^7.2.0" - "@babel/plugin-transform-modules-commonjs" "^7.2.0" - "@babel/plugin-transform-modules-systemjs" "^7.3.4" - "@babel/plugin-transform-modules-umd" "^7.2.0" - "@babel/plugin-transform-named-capturing-groups-regex" "^7.3.0" - "@babel/plugin-transform-new-target" "^7.0.0" - "@babel/plugin-transform-object-super" "^7.2.0" - "@babel/plugin-transform-parameters" "^7.2.0" - "@babel/plugin-transform-regenerator" "^7.3.4" - "@babel/plugin-transform-shorthand-properties" "^7.2.0" - "@babel/plugin-transform-spread" "^7.2.0" - "@babel/plugin-transform-sticky-regex" "^7.2.0" - "@babel/plugin-transform-template-literals" "^7.2.0" - "@babel/plugin-transform-typeof-symbol" "^7.2.0" - "@babel/plugin-transform-unicode-regex" "^7.2.0" - browserslist "^4.3.4" - invariant "^2.2.2" - js-levenshtein "^1.1.3" - semver "^5.3.0" - -"@babel/runtime-corejs2@^7.2.0": - version "7.4.5" - resolved "https://registry.yarnpkg.com/@babel/runtime-corejs2/-/runtime-corejs2-7.4.5.tgz#3d892f0560df21bafb384dd7727e33853e95d3c9" - integrity sha512-5yLuwzvIDecKwYMzJtiarky4Fb5643H3Ao5jwX0HrMR5oM5mn2iHH9wSZonxwNK0oAjAFUQAiOd4jT7/9Y2jMQ== - dependencies: - core-js "^2.6.5" - regenerator-runtime "^0.13.2" - -"@babel/runtime@^7.0.0": - version "7.4.5" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.4.5.tgz#582bb531f5f9dc67d2fcb682979894f75e253f12" - integrity sha512-TuI4qpWZP6lGOGIuGWtp9sPluqYICmbk8T/1vpSysqJxRPkudh/ofFWyqdcMsDf2s7KvDL4/YHgKyvcS3g9CJQ== - dependencies: - regenerator-runtime "^0.13.2" - -"@babel/template@^7.1.0", "@babel/template@^7.4.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.4.4.tgz#f4b88d1225689a08f5bc3a17483545be9e4ed237" - integrity sha512-CiGzLN9KgAvgZsnivND7rkA+AeJ9JB0ciPOD4U59GKbQP2iQl+olF1l76kJOupqidozfZ32ghwBEJDhnk9MEcw== - dependencies: - "@babel/code-frame" "^7.0.0" - "@babel/parser" "^7.4.4" - "@babel/types" "^7.4.4" - -"@babel/traverse@^7.0.0", "@babel/traverse@^7.1.0", "@babel/traverse@^7.4.4", "@babel/traverse@^7.4.5": - version "7.4.5" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.4.5.tgz#4e92d1728fd2f1897dafdd321efbff92156c3216" - integrity sha512-Vc+qjynwkjRmIFGxy0KYoPj4FdVDxLej89kMHFsWScq999uX+pwcX4v9mWRjW0KcAYTPAuVQl2LKP1wEVLsp+A== - dependencies: - "@babel/code-frame" "^7.0.0" - "@babel/generator" "^7.4.4" - "@babel/helper-function-name" "^7.1.0" - "@babel/helper-split-export-declaration" "^7.4.4" - "@babel/parser" "^7.4.5" - "@babel/types" "^7.4.4" - debug "^4.1.0" - globals "^11.1.0" - lodash "^4.17.11" - -"@babel/types@^7.0.0", 
"@babel/types@^7.2.0", "@babel/types@^7.4.4": - version "7.4.4" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.4.4.tgz#8db9e9a629bb7c29370009b4b779ed93fe57d5f0" - integrity sha512-dOllgYdnEFOebhkKCjzSVFqw/PmmB8pH6RGOWkY4GsboQNd47b1fBThBSwlHAq9alF9vc1M3+6oqR47R50L0tQ== - dependencies: - esutils "^2.0.2" - lodash "^4.17.11" - to-fast-properties "^2.0.0" - -"@intervolga/optimize-cssnano-plugin@^1.0.5": - version "1.0.6" - resolved "https://registry.yarnpkg.com/@intervolga/optimize-cssnano-plugin/-/optimize-cssnano-plugin-1.0.6.tgz#be7c7846128b88f6a9b1d1261a0ad06eb5c0fdf8" - integrity sha512-zN69TnSr0viRSU6cEDIcuPcP67QcpQ6uHACg58FiN9PDrU6SLyGW3MR4tiISbYxy1kDWAVPwD+XwQTWE5cigAA== - dependencies: - cssnano "^4.0.0" - cssnano-preset-default "^4.0.0" - postcss "^7.0.0" - -"@mrmlnc/readdir-enhanced@^2.2.1": - version "2.2.1" - resolved "https://registry.yarnpkg.com/@mrmlnc/readdir-enhanced/-/readdir-enhanced-2.2.1.tgz#524af240d1a360527b730475ecfa1344aa540dde" - integrity sha512-bPHp6Ji8b41szTOcaP63VlnbbO5Ny6dwAATtY6JTjh5N2OLrb5Qk/Th5cRkRQhkWCt+EJsYrNB0MiL+Gpn6e3g== - dependencies: - call-me-maybe "^1.0.1" - glob-to-regexp "^0.3.0" - -"@nodelib/fs.stat@^1.1.2": - version "1.1.3" - resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-1.1.3.tgz#2b5a3ab3f918cca48a8c754c08168e3f03eba61b" - integrity sha512-shAmDyaQC4H92APFoIaVDHCx5bStIocgvbwQyxPRrbUY20V1EYTbSDchWbuwlMG3V17cprZhA6+78JfB+3DTPw== - -"@sindresorhus/is@^0.7.0": - version "0.7.0" - resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.7.0.tgz#9a06f4f137ee84d7df0460c1fdb1135ffa6c50fd" - integrity sha512-ONhaKPIufzzrlNbqtWFFd+jlnemX6lJAgq9ZeiZtS7I1PIf/la7CW4m83rTXRnVnsMbW2k56pGYu7AUFJD9Pow== - -"@soda/friendly-errors-webpack-plugin@^1.7.1": - version "1.7.1" - resolved "https://registry.yarnpkg.com/@soda/friendly-errors-webpack-plugin/-/friendly-errors-webpack-plugin-1.7.1.tgz#706f64bcb4a8b9642b48ae3ace444c70334d615d" - integrity sha512-cWKrGaFX+rfbMrAxVv56DzhPNqOJPZuNIS2HGMELtgGzb+vsMzyig9mml5gZ/hr2BGtSLV+dP2LUEuAL8aG2mQ== - dependencies: - chalk "^1.1.3" - error-stack-parser "^2.0.0" - string-width "^2.0.0" - -"@types/events@*": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@types/events/-/events-3.0.0.tgz#2862f3f58a9a7f7c3e78d79f130dd4d71c25c2a7" - integrity sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g== - -"@types/glob@^7.1.1": - version "7.1.1" - resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.1.1.tgz#aa59a1c6e3fbc421e07ccd31a944c30eba521575" - integrity sha512-1Bh06cbWJUHMC97acuD6UMG29nMt0Aqz1vF3guLfG+kHHJhy3AyohZFFxYk2f7Q1SQIrNwvncxAE0N/9s70F2w== - dependencies: - "@types/events" "*" - "@types/minimatch" "*" - "@types/node" "*" - -"@types/minimatch@*": - version "3.0.3" - resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d" - integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== - -"@types/node@*": - version "12.0.2" - resolved "https://registry.yarnpkg.com/@types/node/-/node-12.0.2.tgz#3452a24edf9fea138b48fad4a0a028a683da1e40" - integrity sha512-5tabW/i+9mhrfEOUcLDu2xBPsHJ+X5Orqy9FKpale3SjDA17j5AEpYq5vfy3oAeAHGcvANRCO3NV3d2D6q3NiA== - -"@types/normalize-package-data@^2.4.0": - version "2.4.0" - resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz#e486d0d97396d79beedd0a6e33f4534ff6b4973e" - integrity 
sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA== - -"@types/q@^1.5.1": - version "1.5.2" - resolved "https://registry.yarnpkg.com/@types/q/-/q-1.5.2.tgz#690a1475b84f2a884fd07cd797c00f5f31356ea8" - integrity sha512-ce5d3q03Ex0sy4R14722Rmt6MT07Ua+k4FwDfdcToYJcMKNtRVQvJ6JCAPdAmAnbRb6CsX6aYb9m96NGod9uTw== - -"@vue/babel-helper-vue-jsx-merge-props@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@vue/babel-helper-vue-jsx-merge-props/-/babel-helper-vue-jsx-merge-props-1.0.0.tgz#048fe579958da408fb7a8b2a3ec050b50a661040" - integrity sha512-6tyf5Cqm4m6v7buITuwS+jHzPlIPxbFzEhXR5JGZpbrvOcp1hiQKckd305/3C7C36wFekNTQSxAtgeM0j0yoUw== - -"@vue/babel-plugin-transform-vue-jsx@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@vue/babel-plugin-transform-vue-jsx/-/babel-plugin-transform-vue-jsx-1.0.0.tgz#ebcbf39c312c94114c8c4f407ee4f6c97aa45432" - integrity sha512-U+JNwVQSmaLKjO3lzCUC3cNXxprgezV1N+jOdqbP4xWNaqtWUCJnkjTVcgECM18A/AinDKPcUUeoyhU7yxUxXQ== - dependencies: - "@babel/helper-module-imports" "^7.0.0" - "@babel/plugin-syntax-jsx" "^7.2.0" - "@vue/babel-helper-vue-jsx-merge-props" "^1.0.0" - html-tags "^2.0.0" - lodash.kebabcase "^4.1.1" - svg-tags "^1.0.0" - -"@vue/babel-preset-app@^3.7.0": - version "3.7.0" - resolved "https://registry.yarnpkg.com/@vue/babel-preset-app/-/babel-preset-app-3.7.0.tgz#f37535ea60b71732ddd4395ec143aaa0b10d4c67" - integrity sha512-6PHZ1TYO8OGy22TLyKm/+VmCzLB9L1UxaA3CFxXJH0h/YUOmgdmuAk3AWhomYSwk2GF51On3aQzYouoaWhvBDQ== - dependencies: - "@babel/helper-module-imports" "^7.0.0" - "@babel/plugin-proposal-class-properties" "^7.0.0" - "@babel/plugin-proposal-decorators" "^7.1.0" - "@babel/plugin-syntax-dynamic-import" "^7.0.0" - "@babel/plugin-syntax-jsx" "^7.0.0" - "@babel/plugin-transform-runtime" "^7.4.0" - "@babel/preset-env" "^7.0.0 < 7.4.0" - "@babel/runtime" "^7.0.0" - "@babel/runtime-corejs2" "^7.2.0" - "@vue/babel-preset-jsx" "^1.0.0-beta.3" - babel-plugin-dynamic-import-node "^2.2.0" - babel-plugin-module-resolver "3.2.0" - core-js "^2.6.5" - -"@vue/babel-preset-jsx@^1.0.0-beta.3": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@vue/babel-preset-jsx/-/babel-preset-jsx-1.0.0.tgz#e515cd453a5a8ea6b0f30b2bb92f266d8ab4e9f5" - integrity sha512-5CbDu/QHS+TtQNw5aYAffiMxBBB2Eo9+RJpS8X+6FJbdG5Rvc4TVipEqkrg0pJviWadNg7TEy0Uz4o7VNXeIZw== - dependencies: - "@vue/babel-helper-vue-jsx-merge-props" "^1.0.0" - "@vue/babel-plugin-transform-vue-jsx" "^1.0.0" - "@vue/babel-sugar-functional-vue" "^1.0.0" - "@vue/babel-sugar-inject-h" "^1.0.0" - "@vue/babel-sugar-v-model" "^1.0.0" - "@vue/babel-sugar-v-on" "^1.0.0" - -"@vue/babel-sugar-functional-vue@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@vue/babel-sugar-functional-vue/-/babel-sugar-functional-vue-1.0.0.tgz#17e2c4ca27b74b244da3b923240ec91d10048cb3" - integrity sha512-XE/jNaaorTuhWayCz+QClk5AB9OV5HzrwbzEC6sIUY0J60A28ONQKeTwxfidW42egOkqNH/UU6eE3KLfmiDj0Q== - dependencies: - "@babel/plugin-syntax-jsx" "^7.2.0" - -"@vue/babel-sugar-inject-h@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@vue/babel-sugar-inject-h/-/babel-sugar-inject-h-1.0.0.tgz#e5efb6c5b5b7988dc03831af6d133bf7bcde6347" - integrity sha512-NxWU+DqtbZgfGvd25GPoFMj+rvyQ8ZA1pHj8vIeqRij+vx3sXoKkObjA9ulZunvWw5F6uG9xYy4ytpxab/X+Hg== - dependencies: - "@babel/plugin-syntax-jsx" "^7.2.0" - -"@vue/babel-sugar-v-model@^1.0.0": - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/@vue/babel-sugar-v-model/-/babel-sugar-v-model-1.0.0.tgz#f4da56aa67f65a349bd2c269a95e72e601af4613" - integrity sha512-Pfg2Al0io66P1eO6zUbRIgpyKCU2qTnumiE0lao/wA/uNdb7Dx5Tfd1W6tO5SsByETPnEs8i8+gawRIXX40rFw== - dependencies: - "@babel/plugin-syntax-jsx" "^7.2.0" - "@vue/babel-helper-vue-jsx-merge-props" "^1.0.0" - "@vue/babel-plugin-transform-vue-jsx" "^1.0.0" - camelcase "^5.0.0" - html-tags "^2.0.0" - svg-tags "^1.0.0" - -"@vue/babel-sugar-v-on@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@vue/babel-sugar-v-on/-/babel-sugar-v-on-1.0.0.tgz#a633ee8fe205763e865b011246981b7f89668033" - integrity sha512-2aqJaDLKdSSGlxZU+GjFERaSNUaa6DQreV+V/K4W/6Lxj8520/r1lChWEa/zuAoPD2Vhy0D2QrqqO+I0D6CkKw== - dependencies: - "@babel/plugin-syntax-jsx" "^7.2.0" - "@vue/babel-plugin-transform-vue-jsx" "^1.0.0" - camelcase "^5.0.0" - -"@vue/cli-overlay@^3.7.0": - version "3.7.0" - resolved "https://registry.yarnpkg.com/@vue/cli-overlay/-/cli-overlay-3.7.0.tgz#0f520c98e1be7618b7a68b768666fffa1f589f94" - integrity sha512-QO1rsBVKPZrt+5rHSZXc5UEPVwVgiayOk/cDl+GwSJoR36gnWs1wy1oUX1Awd7QpGiMBK/1+A7aAGhfzKR23Cg== - -"@vue/cli-plugin-babel@^3.7.0": - version "3.7.0" - resolved "https://registry.yarnpkg.com/@vue/cli-plugin-babel/-/cli-plugin-babel-3.7.0.tgz#2be01288980b058f097d26812f65d4d4e8136cca" - integrity sha512-QysJYerzaGzvJ5iT61KpE4hFHiDU8NQ7QjSwIkOAJAx0KY8o0WCjLpAVvjmKtZqNXPBc5Jc3P+eeaz9qQPWNeQ== - dependencies: - "@babel/core" "^7.0.0" - "@vue/babel-preset-app" "^3.7.0" - "@vue/cli-shared-utils" "^3.7.0" - babel-loader "^8.0.5" - webpack ">=4 < 4.29" - -"@vue/cli-plugin-eslint@^3.7.0": - version "3.7.0" - resolved "https://registry.yarnpkg.com/@vue/cli-plugin-eslint/-/cli-plugin-eslint-3.7.0.tgz#6b495fe3c82ec94347c424a9de3cca467a53f90e" - integrity sha512-oFdOLQu6PQKbxinF55XH1lH8hgiDRyb3gIvSKu5YV5r6dnsRdKDxOKLE1PTbaZzQot3Ny/Y7gk025x1qpni3IA== - dependencies: - "@vue/cli-shared-utils" "^3.7.0" - babel-eslint "^10.0.1" - eslint-loader "^2.1.2" - globby "^9.2.0" - webpack ">=4 < 4.29" - optionalDependencies: - eslint "^4.19.1" - eslint-plugin-vue "^4.7.1" - -"@vue/cli-service@^3.7.0": - version "3.7.0" - resolved "https://registry.yarnpkg.com/@vue/cli-service/-/cli-service-3.7.0.tgz#af56526cea64042b48c50a15a9d33c84a71abd31" - integrity sha512-RMVwpCE3EB9cL9VAgu1Dy/tGxz5zrVG4UMPk5t4KDu8jJhHxvcAzgIEIfS6KRp0AKfA6iDW4J0NU0fopnpyL+g== - dependencies: - "@intervolga/optimize-cssnano-plugin" "^1.0.5" - "@soda/friendly-errors-webpack-plugin" "^1.7.1" - "@vue/cli-overlay" "^3.7.0" - "@vue/cli-shared-utils" "^3.7.0" - "@vue/component-compiler-utils" "^2.6.0" - "@vue/preload-webpack-plugin" "^1.1.0" - "@vue/web-component-wrapper" "^1.2.0" - acorn "^6.1.1" - acorn-walk "^6.1.1" - address "^1.0.3" - autoprefixer "^9.5.1" - browserslist "^4.5.4" - cache-loader "^2.0.1" - case-sensitive-paths-webpack-plugin "^2.2.0" - chalk "^2.4.2" - cli-highlight "^2.1.0" - clipboardy "^2.0.0" - cliui "^5.0.0" - copy-webpack-plugin "^4.6.0" - css-loader "^1.0.1" - cssnano "^4.1.10" - current-script-polyfill "^1.0.0" - debug "^4.1.1" - dotenv "^7.0.0" - dotenv-expand "^5.1.0" - escape-string-regexp "^1.0.5" - file-loader "^3.0.1" - fs-extra "^7.0.1" - globby "^9.2.0" - hash-sum "^1.0.2" - html-webpack-plugin "^3.2.0" - launch-editor-middleware "^2.2.1" - lodash.defaultsdeep "^4.6.0" - lodash.mapvalues "^4.6.0" - lodash.transform "^4.6.0" - mini-css-extract-plugin "^0.6.0" - minimist "^1.2.0" - ora "^3.4.0" - portfinder "^1.0.20" - postcss-loader "^3.0.0" - read-pkg "^5.0.0" - semver "^6.0.0" - 
slash "^2.0.0" - source-map-url "^0.4.0" - ssri "^6.0.1" - string.prototype.padend "^3.0.0" - terser-webpack-plugin "^1.2.3" - thread-loader "^2.1.2" - url-loader "^1.1.2" - vue-loader "^15.7.0" - webpack ">=4 < 4.29" - webpack-bundle-analyzer "^3.3.0" - webpack-chain "^4.11.0" - webpack-dev-server "^3.3.1" - webpack-merge "^4.2.1" - yorkie "^2.0.0" - -"@vue/cli-shared-utils@^3.7.0": - version "3.7.0" - resolved "https://registry.yarnpkg.com/@vue/cli-shared-utils/-/cli-shared-utils-3.7.0.tgz#957dd3c31a31208caf9f119cac6008fd4960d46e" - integrity sha512-+LPDAQ1CE3ci1ADOvNqJMPdqyxgJxOq5HUgGDSKCHwviXF6GtynfljZXiSzgWh5ueMFxJphCfeMsTZqFWwsHVg== - dependencies: - chalk "^2.4.1" - execa "^1.0.0" - joi "^14.3.0" - launch-editor "^2.2.1" - lru-cache "^5.1.1" - node-ipc "^9.1.1" - opn "^5.3.0" - ora "^3.4.0" - request "^2.87.0" - request-promise-native "^1.0.7" - semver "^6.0.0" - string.prototype.padstart "^3.0.0" - -"@vue/component-compiler-utils@^2.5.1", "@vue/component-compiler-utils@^2.6.0": - version "2.6.0" - resolved "https://registry.yarnpkg.com/@vue/component-compiler-utils/-/component-compiler-utils-2.6.0.tgz#aa46d2a6f7647440b0b8932434d22f12371e543b" - integrity sha512-IHjxt7LsOFYc0DkTncB7OXJL7UzwOLPPQCfEUNyxL2qt+tF12THV+EO33O1G2Uk4feMSWua3iD39Itszx0f0bw== - dependencies: - consolidate "^0.15.1" - hash-sum "^1.0.2" - lru-cache "^4.1.2" - merge-source-map "^1.1.0" - postcss "^7.0.14" - postcss-selector-parser "^5.0.0" - prettier "1.16.3" - source-map "~0.6.1" - vue-template-es2015-compiler "^1.9.0" - -"@vue/preload-webpack-plugin@^1.1.0": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@vue/preload-webpack-plugin/-/preload-webpack-plugin-1.1.0.tgz#d768dba004261c029b53a77c5ea2d5f9ee4f3cce" - integrity sha512-rcn2KhSHESBFMPj5vc5X2pI9bcBNQQixvJXhD5gZ4rN2iym/uH2qfDSQfUS5+qwiz0a85TCkeUs6w6jxFDudbw== - -"@vue/web-component-wrapper@^1.2.0": - version "1.2.0" - resolved "https://registry.yarnpkg.com/@vue/web-component-wrapper/-/web-component-wrapper-1.2.0.tgz#bb0e46f1585a7e289b4ee6067dcc5a6ae62f1dd1" - integrity sha512-Xn/+vdm9CjuC9p3Ae+lTClNutrVhsXpzxvoTXXtoys6kVRX9FkueSUAqSWAyZntmVLlR4DosBV4pH8y5Z/HbUw== - -"@webassemblyjs/ast@1.7.11": - version "1.7.11" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.7.11.tgz#b988582cafbb2b095e8b556526f30c90d057cace" - integrity sha512-ZEzy4vjvTzScC+SH8RBssQUawpaInUdMTYwYYLh54/s8TuT0gBLuyUnppKsVyZEi876VmmStKsUs28UxPgdvrA== - dependencies: - "@webassemblyjs/helper-module-context" "1.7.11" - "@webassemblyjs/helper-wasm-bytecode" "1.7.11" - "@webassemblyjs/wast-parser" "1.7.11" - -"@webassemblyjs/floating-point-hex-parser@1.7.11": - version "1.7.11" - resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.7.11.tgz#a69f0af6502eb9a3c045555b1a6129d3d3f2e313" - integrity sha512-zY8dSNyYcgzNRNT666/zOoAyImshm3ycKdoLsyDw/Bwo6+/uktb7p4xyApuef1dwEBo/U/SYQzbGBvV+nru2Xg== - -"@webassemblyjs/helper-api-error@1.7.11": - version "1.7.11" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.7.11.tgz#c7b6bb8105f84039511a2b39ce494f193818a32a" - integrity sha512-7r1qXLmiglC+wPNkGuXCvkmalyEstKVwcueZRP2GNC2PAvxbLYwLLPr14rcdJaE4UtHxQKfFkuDFuv91ipqvXg== - -"@webassemblyjs/helper-buffer@1.7.11": - version "1.7.11" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.7.11.tgz#3122d48dcc6c9456ed982debe16c8f37101df39b" - integrity sha512-MynuervdylPPh3ix+mKZloTcL06P8tenNH3sx6s0qE8SLR6DdwnfgA7Hc9NSYeob2jrW5Vql6GVlsQzKQCa13w== - 
-"@webassemblyjs/helper-code-frame@1.7.11": - version "1.7.11" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.7.11.tgz#cf8f106e746662a0da29bdef635fcd3d1248364b" - integrity sha512-T8ESC9KMXFTXA5urJcyor5cn6qWeZ4/zLPyWeEXZ03hj/x9weSokGNkVCdnhSabKGYWxElSdgJ+sFa9G/RdHNw== - dependencies: - "@webassemblyjs/wast-printer" "1.7.11" - -"@webassemblyjs/helper-fsm@1.7.11": - version "1.7.11" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-fsm/-/helper-fsm-1.7.11.tgz#df38882a624080d03f7503f93e3f17ac5ac01181" - integrity sha512-nsAQWNP1+8Z6tkzdYlXT0kxfa2Z1tRTARd8wYnc/e3Zv3VydVVnaeePgqUzFrpkGUyhUUxOl5ML7f1NuT+gC0A== - -"@webassemblyjs/helper-module-context@1.7.11": - version "1.7.11" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-module-context/-/helper-module-context-1.7.11.tgz#d874d722e51e62ac202476935d649c802fa0e209" - integrity sha512-JxfD5DX8Ygq4PvXDucq0M+sbUFA7BJAv/GGl9ITovqE+idGX+J3QSzJYz+LwQmL7fC3Rs+utvWoJxDb6pmC0qg== - -"@webassemblyjs/helper-wasm-bytecode@1.7.11": - version "1.7.11" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.7.11.tgz#dd9a1e817f1c2eb105b4cf1013093cb9f3c9cb06" - integrity sha512-cMXeVS9rhoXsI9LLL4tJxBgVD/KMOKXuFqYb5oCJ/opScWpkCMEz9EJtkonaNcnLv2R3K5jIeS4TRj/drde1JQ== - -"@webassemblyjs/helper-wasm-section@1.7.11": - version "1.7.11" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.7.11.tgz#9c9ac41ecf9fbcfffc96f6d2675e2de33811e68a" - integrity sha512-8ZRY5iZbZdtNFE5UFunB8mmBEAbSI3guwbrsCl4fWdfRiAcvqQpeqd5KHhSWLL5wuxo53zcaGZDBU64qgn4I4Q== - dependencies: - "@webassemblyjs/ast" "1.7.11" - "@webassemblyjs/helper-buffer" "1.7.11" - "@webassemblyjs/helper-wasm-bytecode" "1.7.11" - "@webassemblyjs/wasm-gen" "1.7.11" - -"@webassemblyjs/ieee754@1.7.11": - version "1.7.11" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.7.11.tgz#c95839eb63757a31880aaec7b6512d4191ac640b" - integrity sha512-Mmqx/cS68K1tSrvRLtaV/Lp3NZWzXtOHUW2IvDvl2sihAwJh4ACE0eL6A8FvMyDG9abes3saB6dMimLOs+HMoQ== - dependencies: - "@xtuc/ieee754" "^1.2.0" - -"@webassemblyjs/leb128@1.7.11": - version "1.7.11" - resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.7.11.tgz#d7267a1ee9c4594fd3f7e37298818ec65687db63" - integrity sha512-vuGmgZjjp3zjcerQg+JA+tGOncOnJLWVkt8Aze5eWQLwTQGNgVLcyOTqgSCxWTR4J42ijHbBxnuRaL1Rv7XMdw== - dependencies: - "@xtuc/long" "4.2.1" - -"@webassemblyjs/utf8@1.7.11": - version "1.7.11" - resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.7.11.tgz#06d7218ea9fdc94a6793aa92208160db3d26ee82" - integrity sha512-C6GFkc7aErQIAH+BMrIdVSmW+6HSe20wg57HEC1uqJP8E/xpMjXqQUxkQw07MhNDSDcGpxI9G5JSNOQCqJk4sA== - -"@webassemblyjs/wasm-edit@1.7.11": - version "1.7.11" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.7.11.tgz#8c74ca474d4f951d01dbae9bd70814ee22a82005" - integrity sha512-FUd97guNGsCZQgeTPKdgxJhBXkUbMTY6hFPf2Y4OedXd48H97J+sOY2Ltaq6WGVpIH8o/TGOVNiVz/SbpEMJGg== - dependencies: - "@webassemblyjs/ast" "1.7.11" - "@webassemblyjs/helper-buffer" "1.7.11" - "@webassemblyjs/helper-wasm-bytecode" "1.7.11" - "@webassemblyjs/helper-wasm-section" "1.7.11" - "@webassemblyjs/wasm-gen" "1.7.11" - "@webassemblyjs/wasm-opt" "1.7.11" - "@webassemblyjs/wasm-parser" "1.7.11" - "@webassemblyjs/wast-printer" "1.7.11" - -"@webassemblyjs/wasm-gen@1.7.11": - version "1.7.11" - resolved 
"https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.7.11.tgz#9bbba942f22375686a6fb759afcd7ac9c45da1a8" - integrity sha512-U/KDYp7fgAZX5KPfq4NOupK/BmhDc5Kjy2GIqstMhvvdJRcER/kUsMThpWeRP8BMn4LXaKhSTggIJPOeYHwISA== - dependencies: - "@webassemblyjs/ast" "1.7.11" - "@webassemblyjs/helper-wasm-bytecode" "1.7.11" - "@webassemblyjs/ieee754" "1.7.11" - "@webassemblyjs/leb128" "1.7.11" - "@webassemblyjs/utf8" "1.7.11" - -"@webassemblyjs/wasm-opt@1.7.11": - version "1.7.11" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.7.11.tgz#b331e8e7cef8f8e2f007d42c3a36a0580a7d6ca7" - integrity sha512-XynkOwQyiRidh0GLua7SkeHvAPXQV/RxsUeERILmAInZegApOUAIJfRuPYe2F7RcjOC9tW3Cb9juPvAC/sCqvg== - dependencies: - "@webassemblyjs/ast" "1.7.11" - "@webassemblyjs/helper-buffer" "1.7.11" - "@webassemblyjs/wasm-gen" "1.7.11" - "@webassemblyjs/wasm-parser" "1.7.11" - -"@webassemblyjs/wasm-parser@1.7.11": - version "1.7.11" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.7.11.tgz#6e3d20fa6a3519f6b084ef9391ad58211efb0a1a" - integrity sha512-6lmXRTrrZjYD8Ng8xRyvyXQJYUQKYSXhJqXOBLw24rdiXsHAOlvw5PhesjdcaMadU/pyPQOJ5dHreMjBxwnQKg== - dependencies: - "@webassemblyjs/ast" "1.7.11" - "@webassemblyjs/helper-api-error" "1.7.11" - "@webassemblyjs/helper-wasm-bytecode" "1.7.11" - "@webassemblyjs/ieee754" "1.7.11" - "@webassemblyjs/leb128" "1.7.11" - "@webassemblyjs/utf8" "1.7.11" - -"@webassemblyjs/wast-parser@1.7.11": - version "1.7.11" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-parser/-/wast-parser-1.7.11.tgz#25bd117562ca8c002720ff8116ef9072d9ca869c" - integrity sha512-lEyVCg2np15tS+dm7+JJTNhNWq9yTZvi3qEhAIIOaofcYlUp0UR5/tVqOwa/gXYr3gjwSZqw+/lS9dscyLelbQ== - dependencies: - "@webassemblyjs/ast" "1.7.11" - "@webassemblyjs/floating-point-hex-parser" "1.7.11" - "@webassemblyjs/helper-api-error" "1.7.11" - "@webassemblyjs/helper-code-frame" "1.7.11" - "@webassemblyjs/helper-fsm" "1.7.11" - "@xtuc/long" "4.2.1" - -"@webassemblyjs/wast-printer@1.7.11": - version "1.7.11" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.7.11.tgz#c4245b6de242cb50a2cc950174fdbf65c78d7813" - integrity sha512-m5vkAsuJ32QpkdkDOUPGSltrg8Cuk3KBx4YrmAGQwCZPRdUHXxG4phIOuuycLemHFr74sWL9Wthqss4fzdzSwg== - dependencies: - "@webassemblyjs/ast" "1.7.11" - "@webassemblyjs/wast-parser" "1.7.11" - "@xtuc/long" "4.2.1" - -"@xtuc/ieee754@^1.2.0": - version "1.2.0" - resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" - integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== - -"@xtuc/long@4.2.1": - version "4.2.1" - resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.1.tgz#5c85d662f76fa1d34575766c5dcd6615abcd30d8" - integrity sha512-FZdkNBDqBRHKQ2MEbSC17xnPFOhZxeJ2YGSfr2BKf3sujG49Qe3bB+rGCwQfIaA7WHnGeGkSijX4FuBCdrzW/g== - -abbrev@1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" - integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== - -accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.7: - version "1.3.7" - resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" - integrity sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA== - dependencies: - mime-types "~2.1.24" - negotiator "0.6.2" - 
-acorn-dynamic-import@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/acorn-dynamic-import/-/acorn-dynamic-import-3.0.0.tgz#901ceee4c7faaef7e07ad2a47e890675da50a278" - integrity sha512-zVWV8Z8lislJoOKKqdNMOB+s6+XV5WERty8MnKBeFgwA+19XJjJHs2RP5dzM57FftIs+jQnRToLiWazKr6sSWg== - dependencies: - acorn "^5.0.0" - -acorn-jsx@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-3.0.1.tgz#afdf9488fb1ecefc8348f6fb22f464e32a58b36b" - integrity sha1-r9+UiPsezvyDSPb7IvRk4ypYs2s= - dependencies: - acorn "^3.0.4" - -acorn-walk@^6.1.1: - version "6.1.1" - resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-6.1.1.tgz#d363b66f5fac5f018ff9c3a1e7b6f8e310cc3913" - integrity sha512-OtUw6JUTgxA2QoqqmrmQ7F2NYqiBPi/L2jqHyFtllhOUvXYQXf0Z1CYUinIfyT4bTCGmrA7gX9FvHA81uzCoVw== - -acorn@^3.0.4: - version "3.3.0" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-3.3.0.tgz#45e37fb39e8da3f25baee3ff5369e2bb5f22017a" - integrity sha1-ReN/s56No/JbruP/U2niu18iAXo= - -acorn@^5.0.0, acorn@^5.5.0, acorn@^5.6.2: - version "5.7.3" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.7.3.tgz#67aa231bf8812974b85235a96771eb6bd07ea279" - integrity sha512-T/zvzYRfbVojPWahDsE5evJdHb3oJoQfFbsrKM7w5Zcs++Tr257tia3BmMP8XYVjp1S9RZXQMh7gao96BlqZOw== - -acorn@^6.0.7, acorn@^6.1.1: - version "6.1.1" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.1.1.tgz#7d25ae05bb8ad1f9b699108e1094ecd7884adc1f" - integrity sha512-jPTiwtOxaHNaAPg/dmrJ/beuzLRnXtB0kQPQ8JpotKJgTB6rX6c8mlf315941pyjBSaPg8NHXS9fhP4u17DpGA== - -address@^1.0.3: - version "1.1.0" - resolved "https://registry.yarnpkg.com/address/-/address-1.1.0.tgz#ef8e047847fcd2c5b6f50c16965f924fd99fe709" - integrity sha512-4diPfzWbLEIElVG4AnqP+00SULlPzNuyJFNnmMrLgyaxG6tZXJ1sn7mjBu4fHrJE+Yp/jgylOweJn2xsLMFggQ== - -ajv-errors@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/ajv-errors/-/ajv-errors-1.0.1.tgz#f35986aceb91afadec4102fbd85014950cefa64d" - integrity sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ== - -ajv-keywords@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-2.1.1.tgz#617997fc5f60576894c435f940d819e135b80762" - integrity sha1-YXmX/F9gV2iUxDX5QNgZ4TW4B2I= - -ajv-keywords@^3.1.0: - version "3.4.0" - resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.4.0.tgz#4b831e7b531415a7cc518cd404e73f6193c6349d" - integrity sha512-aUjdRFISbuFOl0EIZc+9e4FfZp0bDZgAdOOf30bJmw8VM9v84SHyVyxDfbWxpGYbdZD/9XoKxfHVNmxPkhwyGw== - -ajv@^5.2.3, ajv@^5.3.0: - version "5.5.2" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.5.2.tgz#73b5eeca3fab653e3d3f9422b341ad42205dc965" - integrity sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU= - dependencies: - co "^4.6.0" - fast-deep-equal "^1.0.0" - fast-json-stable-stringify "^2.0.0" - json-schema-traverse "^0.3.0" - -ajv@^6.1.0, ajv@^6.5.5: - version "6.10.0" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.10.0.tgz#90d0d54439da587cd7e843bfb7045f50bd22bdf1" - integrity sha512-nffhOpkymDECQyR0mnsUtoCE8RlX38G0rYP+wgLWFyZuUyuuojSSvi/+euOiQBIn63whYwYVIIH1TvE3tu4OEg== - dependencies: - fast-deep-equal "^2.0.1" - fast-json-stable-stringify "^2.0.0" - json-schema-traverse "^0.4.1" - uri-js "^4.2.2" - -alphanum-sort@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/alphanum-sort/-/alphanum-sort-1.0.2.tgz#97a1119649b211ad33691d9f9f486a8ec9fbe0a3" - integrity sha1-l6ERlkmyEa0zaR2fn0hqjsn74KM= - -ansi-colors@^3.0.0: - version "3.2.4" - resolved 
"https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.4.tgz#e3a3da4bfbae6c86a9c285625de124a234026fbf" - integrity sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA== - -ansi-escapes@^3.0.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.2.0.tgz#8780b98ff9dbf5638152d1f1fe5c1d7b4442976b" - integrity sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ== - -ansi-html@0.0.7: - version "0.0.7" - resolved "https://registry.yarnpkg.com/ansi-html/-/ansi-html-0.0.7.tgz#813584021962a9e9e6fd039f940d12f56ca7859e" - integrity sha1-gTWEAhliqenm/QOflA0S9WynhZ4= - -ansi-regex@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" - integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= - -ansi-regex@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" - integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= - -ansi-regex@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" - integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== - -ansi-styles@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" - integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= - -ansi-styles@^3.2.0, ansi-styles@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" - integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== - dependencies: - color-convert "^1.9.0" - -any-promise@^1.0.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" - integrity sha1-q8av7tzqUugJzcA3au0845Y10X8= - -anymatch@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" - integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== - dependencies: - micromatch "^3.1.4" - normalize-path "^2.1.1" - -aproba@^1.0.3, aproba@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" - integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== - -arch@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/arch/-/arch-2.1.1.tgz#8f5c2731aa35a30929221bb0640eed65175ec84e" - integrity sha512-BLM56aPo9vLLFVa8+/+pJLnrZ7QGGTVHWsCwieAWT9o9K8UeGaQbzZbGoabWLOo2ksBCztoXdqBZBplqLDDCSg== - -are-we-there-yet@~1.1.2: - version "1.1.5" - resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21" - integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w== - dependencies: - delegates "^1.0.0" - readable-stream "^2.0.6" - -argparse@^1.0.7: - version "1.0.10" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" - integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== - 
dependencies: - sprintf-js "~1.0.2" - -arr-diff@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" - integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= - -arr-flatten@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" - integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== - -arr-union@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" - integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= - -array-filter@~0.0.0: - version "0.0.1" - resolved "https://registry.yarnpkg.com/array-filter/-/array-filter-0.0.1.tgz#7da8cf2e26628ed732803581fd21f67cacd2eeec" - integrity sha1-fajPLiZijtcygDWB/SH2fKzS7uw= - -array-flatten@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" - integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= - -array-flatten@^2.1.0: - version "2.1.2" - resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" - integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== - -array-map@~0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/array-map/-/array-map-0.0.0.tgz#88a2bab73d1cf7bcd5c1b118a003f66f665fa662" - integrity sha1-iKK6tz0c97zVwbEYoAP2b2ZfpmI= - -array-reduce@~0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/array-reduce/-/array-reduce-0.0.0.tgz#173899d3ffd1c7d9383e4479525dbe278cab5f2b" - integrity sha1-FziZ0//Rx9k4PkR5Ul2+J4yrXys= - -array-union@^1.0.1, array-union@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" - integrity sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk= - dependencies: - array-uniq "^1.0.1" - -array-uniq@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" - integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY= - -array-unique@^0.3.2: - version "0.3.2" - resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" - integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= - -asn1.js@^4.0.0: - version "4.10.1" - resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-4.10.1.tgz#b9c2bf5805f1e64aadeed6df3a2bfafb5a73f5a0" - integrity sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw== - dependencies: - bn.js "^4.0.0" - inherits "^2.0.1" - minimalistic-assert "^1.0.0" - -asn1@~0.2.3: - version "0.2.4" - resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" - integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== - dependencies: - safer-buffer "~2.1.0" - -assert-plus@1.0.0, assert-plus@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" - integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= - -assert@^1.1.1: - version "1.5.0" - resolved "https://registry.yarnpkg.com/assert/-/assert-1.5.0.tgz#55c109aaf6e0aefdb3dc4b71240c70bf574b18eb" - integrity 
sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA== - dependencies: - object-assign "^4.1.1" - util "0.10.3" - -assign-symbols@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" - integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= - -async-each@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" - integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== - -async-limiter@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.0.tgz#78faed8c3d074ab81f22b4e985d79e8738f720f8" - integrity sha512-jp/uFnooOiO+L211eZOoSyzpOITMXx1rBITauYykG3BRYPu8h0UcxsPNB04RR5vo4Tyz3+ay17tR6JVf9qzYWg== - -async@^1.5.2: - version "1.5.2" - resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" - integrity sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo= - -asynckit@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" - integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= - -atob@^2.1.1: - version "2.1.2" - resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" - integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== - -autoprefixer@^9.5.1: - version "9.5.1" - resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.5.1.tgz#243b1267b67e7e947f28919d786b50d3bb0fb357" - integrity sha512-KJSzkStUl3wP0D5sdMlP82Q52JLy5+atf2MHAre48+ckWkXgixmfHyWmA77wFDy6jTHU6mIgXv6hAQ2mf1PjJQ== - dependencies: - browserslist "^4.5.4" - caniuse-lite "^1.0.30000957" - normalize-range "^0.1.2" - num2fraction "^1.2.2" - postcss "^7.0.14" - postcss-value-parser "^3.3.1" - -aws-sign2@~0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" - integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= - -aws4@^1.8.0: - version "1.8.0" - resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.8.0.tgz#f0e003d9ca9e7f59c7a508945d7b2ef9a04a542f" - integrity sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ== - -babel-code-frame@^6.22.0, babel-code-frame@^6.26.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" - integrity sha1-Y/1D99weO7fONZR9uP42mj9Yx0s= - dependencies: - chalk "^1.1.3" - esutils "^2.0.2" - js-tokens "^3.0.2" - -babel-eslint@^10.0.1: - version "10.0.1" - resolved "https://registry.yarnpkg.com/babel-eslint/-/babel-eslint-10.0.1.tgz#919681dc099614cd7d31d45c8908695092a1faed" - integrity sha512-z7OT1iNV+TjOwHNLLyJk+HN+YVWX+CLE6fPD2SymJZOZQBs+QIexFjhm4keGTm8MW9xr4EC9Q0PbaLB24V5GoQ== - dependencies: - "@babel/code-frame" "^7.0.0" - "@babel/parser" "^7.0.0" - "@babel/traverse" "^7.0.0" - "@babel/types" "^7.0.0" - eslint-scope "3.7.1" - eslint-visitor-keys "^1.0.0" - -babel-loader@^8.0.5: - version "8.0.6" - resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.0.6.tgz#e33bdb6f362b03f4bb141a0c21ab87c501b70dfb" - integrity sha512-4BmWKtBOBm13uoUwd08UwjZlaw3O9GWf456R9j+5YykFZ6LUIjIKLc0zEZf+hauxPOJs96C8k6FvYD09vWzhYw== - dependencies: - find-cache-dir "^2.0.0" - loader-utils "^1.0.2" - 
mkdirp "^0.5.1" - pify "^4.0.1" - -babel-plugin-dynamic-import-node@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.2.0.tgz#c0adfb07d95f4a4495e9aaac6ec386c4d7c2524e" - integrity sha512-fP899ELUnTaBcIzmrW7nniyqqdYWrWuJUyPWHxFa/c7r7hS6KC8FscNfLlBNIoPSc55kYMGEEKjPjJGCLbE1qA== - dependencies: - object.assign "^4.1.0" - -babel-plugin-module-resolver@3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/babel-plugin-module-resolver/-/babel-plugin-module-resolver-3.2.0.tgz#ddfa5e301e3b9aa12d852a9979f18b37881ff5a7" - integrity sha512-tjR0GvSndzPew/Iayf4uICWZqjBwnlMWjSx6brryfQ81F9rxBVqwDJtFCV8oOs0+vJeefK9TmdZtkIFdFe1UnA== - dependencies: - find-babel-config "^1.1.0" - glob "^7.1.2" - pkg-up "^2.0.0" - reselect "^3.0.1" - resolve "^1.4.0" - -balanced-match@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" - integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= - -base64-js@^1.0.2: - version "1.3.0" - resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.0.tgz#cab1e6118f051095e58b5281aea8c1cd22bfc0e3" - integrity sha512-ccav/yGvoa80BQDljCxsmmQ3Xvx60/UpBIij5QN21W3wBi/hhIC9OoO+KLpu9IJTS9j4DRVJ3aDDF9cMSoa2lw== - -base@^0.11.1: - version "0.11.2" - resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" - integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== - dependencies: - cache-base "^1.0.1" - class-utils "^0.3.5" - component-emitter "^1.2.1" - define-property "^1.0.0" - isobject "^3.0.1" - mixin-deep "^1.2.0" - pascalcase "^0.1.1" - -batch@0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" - integrity sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY= - -bcrypt-pbkdf@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" - integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= - dependencies: - tweetnacl "^0.14.3" - -bfj@^6.1.1: - version "6.1.1" - resolved "https://registry.yarnpkg.com/bfj/-/bfj-6.1.1.tgz#05a3b7784fbd72cfa3c22e56002ef99336516c48" - integrity sha512-+GUNvzHR4nRyGybQc2WpNJL4MJazMuvf92ueIyA0bIkPRwhhQu3IfZQ2PSoVPpCBJfmoSdOxu5rnotfFLlvYRQ== - dependencies: - bluebird "^3.5.1" - check-types "^7.3.0" - hoopy "^0.1.2" - tryer "^1.0.0" - -big.js@^3.1.3: - version "3.2.0" - resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.2.0.tgz#a5fc298b81b9e0dca2e458824784b65c52ba588e" - integrity sha512-+hN/Zh2D08Mx65pZ/4g5bsmNiZUuChDiQfTUQ7qJr4/kuopCr88xZsAXv6mBoZEsUI4OuGHlX59qE94K2mMW8Q== - -big.js@^5.2.2: - version "5.2.2" - resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" - integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== - -binary-extensions@^1.0.0: - version "1.13.1" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" - integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== - -bluebird@^3.1.1, bluebird@^3.5.1, bluebird@^3.5.3: - version "3.5.4" - resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.4.tgz#d6cc661595de30d5b3af5fcedd3c0b3ef6ec5714" - integrity 
sha512-FG+nFEZChJrbQ9tIccIfZJBz3J7mLrAhxakAbnrJWn8d7aKOC+LWifa0G+p4ZqKp4y13T7juYvdhq9NzKdsrjw== - -bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.4.0: - version "4.11.8" - resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.8.tgz#2cde09eb5ee341f484746bb0309b3253b1b1442f" - integrity sha512-ItfYfPLkWHUjckQCk8xC+LwxgK8NYcXywGigJgSwOP8Y2iyWT4f2vsZnoOXTTbo+o5yXmIUJ4gn5538SO5S3gA== - -body-parser@1.19.0: - version "1.19.0" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a" - integrity sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw== - dependencies: - bytes "3.1.0" - content-type "~1.0.4" - debug "2.6.9" - depd "~1.1.2" - http-errors "1.7.2" - iconv-lite "0.4.24" - on-finished "~2.3.0" - qs "6.7.0" - raw-body "2.4.0" - type-is "~1.6.17" - -bonjour@^3.5.0: - version "3.5.0" - resolved "https://registry.yarnpkg.com/bonjour/-/bonjour-3.5.0.tgz#8e890a183d8ee9a2393b3844c691a42bcf7bc9f5" - integrity sha1-jokKGD2O6aI5OzhExpGkK897yfU= - dependencies: - array-flatten "^2.1.0" - deep-equal "^1.0.1" - dns-equal "^1.0.0" - dns-txt "^2.0.2" - multicast-dns "^6.0.1" - multicast-dns-service-types "^1.1.0" - -boolbase@^1.0.0, boolbase@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" - integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24= - -brace-expansion@^1.1.7: - version "1.1.11" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== - dependencies: - balanced-match "^1.0.0" - concat-map "0.0.1" - -braces@^2.3.1, braces@^2.3.2: - version "2.3.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" - integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== - dependencies: - arr-flatten "^1.1.0" - array-unique "^0.3.2" - extend-shallow "^2.0.1" - fill-range "^4.0.0" - isobject "^3.0.1" - repeat-element "^1.1.2" - snapdragon "^0.8.1" - snapdragon-node "^2.0.1" - split-string "^3.0.2" - to-regex "^3.0.1" - -brorand@^1.0.1: - version "1.1.0" - resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" - integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8= - -browserify-aes@^1.0.0, browserify-aes@^1.0.4: - version "1.2.0" - resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" - integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== - dependencies: - buffer-xor "^1.0.3" - cipher-base "^1.0.0" - create-hash "^1.1.0" - evp_bytestokey "^1.0.3" - inherits "^2.0.1" - safe-buffer "^5.0.1" - -browserify-cipher@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0" - integrity sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w== - dependencies: - browserify-aes "^1.0.4" - browserify-des "^1.0.0" - evp_bytestokey "^1.0.0" - -browserify-des@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c" - integrity 
sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A== - dependencies: - cipher-base "^1.0.1" - des.js "^1.0.0" - inherits "^2.0.1" - safe-buffer "^5.1.2" - -browserify-rsa@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.0.1.tgz#21e0abfaf6f2029cf2fafb133567a701d4135524" - integrity sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ= - dependencies: - bn.js "^4.1.0" - randombytes "^2.0.1" - -browserify-sign@^4.0.0: - version "4.0.4" - resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.0.4.tgz#aa4eb68e5d7b658baa6bf6a57e630cbd7a93d298" - integrity sha1-qk62jl17ZYuqa/alfmMMvXqT0pg= - dependencies: - bn.js "^4.1.1" - browserify-rsa "^4.0.0" - create-hash "^1.1.0" - create-hmac "^1.1.2" - elliptic "^6.0.0" - inherits "^2.0.1" - parse-asn1 "^5.0.0" - -browserify-zlib@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.2.0.tgz#2869459d9aa3be245fe8fe2ca1f46e2e7f54d73f" - integrity sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA== - dependencies: - pako "~1.0.5" - -browserslist@^4.0.0, browserslist@^4.3.4, browserslist@^4.5.4: - version "4.6.0" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.6.0.tgz#5274028c26f4d933d5b1323307c1d1da5084c9ff" - integrity sha512-Jk0YFwXBuMOOol8n6FhgkDzn3mY9PYLYGk29zybF05SbRTsMgPqmTNeQQhOghCxq5oFqAXE3u4sYddr4C0uRhg== - dependencies: - caniuse-lite "^1.0.30000967" - electron-to-chromium "^1.3.133" - node-releases "^1.1.19" - -buffer-from@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" - integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== - -buffer-indexof@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/buffer-indexof/-/buffer-indexof-1.1.1.tgz#52fabcc6a606d1a00302802648ef68f639da268c" - integrity sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g== - -buffer-xor@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" - integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk= - -buffer@^4.3.0: - version "4.9.1" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.1.tgz#6d1bb601b07a4efced97094132093027c95bc298" - integrity sha1-bRu2AbB6TvztlwlBMgkwJ8lbwpg= - dependencies: - base64-js "^1.0.2" - ieee754 "^1.1.4" - isarray "^1.0.0" - -builtin-status-codes@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8" - integrity sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug= - -bytes@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" - integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg= - -bytes@3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" - integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== - -cacache@^10.0.4: - version "10.0.4" - resolved "https://registry.yarnpkg.com/cacache/-/cacache-10.0.4.tgz#6452367999eff9d4188aefd9a14e9d7c6a263460" - integrity sha512-Dph0MzuH+rTQzGPNT9fAnrPmMmjKfST6trxJeK7NQuHRaVw24VzPRWTmg9MpcwOVQZO0E1FBICUlFeNaKPIfHA== - dependencies: - bluebird 
"^3.5.1" - chownr "^1.0.1" - glob "^7.1.2" - graceful-fs "^4.1.11" - lru-cache "^4.1.1" - mississippi "^2.0.0" - mkdirp "^0.5.1" - move-concurrently "^1.0.1" - promise-inflight "^1.0.1" - rimraf "^2.6.2" - ssri "^5.2.4" - unique-filename "^1.1.0" - y18n "^4.0.0" - -cacache@^11.3.2: - version "11.3.2" - resolved "https://registry.yarnpkg.com/cacache/-/cacache-11.3.2.tgz#2d81e308e3d258ca38125b676b98b2ac9ce69bfa" - integrity sha512-E0zP4EPGDOaT2chM08Als91eYnf8Z+eH1awwwVsngUmgppfM5jjJ8l3z5vO5p5w/I3LsiXawb1sW0VY65pQABg== - dependencies: - bluebird "^3.5.3" - chownr "^1.1.1" - figgy-pudding "^3.5.1" - glob "^7.1.3" - graceful-fs "^4.1.15" - lru-cache "^5.1.1" - mississippi "^3.0.0" - mkdirp "^0.5.1" - move-concurrently "^1.0.1" - promise-inflight "^1.0.1" - rimraf "^2.6.2" - ssri "^6.0.1" - unique-filename "^1.1.1" - y18n "^4.0.0" - -cache-base@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" - integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== - dependencies: - collection-visit "^1.0.0" - component-emitter "^1.2.1" - get-value "^2.0.6" - has-value "^1.0.0" - isobject "^3.0.1" - set-value "^2.0.0" - to-object-path "^0.3.0" - union-value "^1.0.0" - unset-value "^1.0.0" - -cache-loader@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/cache-loader/-/cache-loader-2.0.1.tgz#5758f41a62d7c23941e3c3c7016e6faeb03acb07" - integrity sha512-V99T3FOynmGx26Zom+JrVBytLBsmUCzVG2/4NnUKgvXN4bEV42R1ERl1IyiH/cvFIDA1Ytq2lPZ9tXDSahcQpQ== - dependencies: - loader-utils "^1.1.0" - mkdirp "^0.5.1" - neo-async "^2.6.0" - normalize-path "^3.0.0" - schema-utils "^1.0.0" - -cacheable-request@^2.1.1: - version "2.1.4" - resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-2.1.4.tgz#0d808801b6342ad33c91df9d0b44dc09b91e5c3d" - integrity sha1-DYCIAbY0KtM8kd+dC0TcCbkeXD0= - dependencies: - clone-response "1.0.2" - get-stream "3.0.0" - http-cache-semantics "3.8.1" - keyv "3.0.0" - lowercase-keys "1.0.0" - normalize-url "2.0.1" - responselike "1.0.2" - -call-me-maybe@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/call-me-maybe/-/call-me-maybe-1.0.1.tgz#26d208ea89e37b5cbde60250a15f031c16a4d66b" - integrity sha1-JtII6onje1y95gJQoV8DHBak1ms= - -caller-callsite@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/caller-callsite/-/caller-callsite-2.0.0.tgz#847e0fce0a223750a9a027c54b33731ad3154134" - integrity sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ= - dependencies: - callsites "^2.0.0" - -caller-path@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-0.1.0.tgz#94085ef63581ecd3daa92444a8fe94e82577751f" - integrity sha1-lAhe9jWB7NPaqSREqP6U6CV3dR8= - dependencies: - callsites "^0.2.0" - -caller-path@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-2.0.0.tgz#468f83044e369ab2010fac5f06ceee15bb2cb1f4" - integrity sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ= - dependencies: - caller-callsite "^2.0.0" - -callsites@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/callsites/-/callsites-0.2.0.tgz#afab96262910a7f33c19a5775825c69f34e350ca" - integrity sha1-r6uWJikQp/M8GaV3WCXGnzTjUMo= - -callsites@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" - integrity sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA= - -camel-case@3.0.x: - version "3.0.0" - resolved 
"https://registry.yarnpkg.com/camel-case/-/camel-case-3.0.0.tgz#ca3c3688a4e9cf3a4cda777dc4dcbc713249cf73" - integrity sha1-yjw2iKTpzzpM2nd9xNy8cTJJz3M= - dependencies: - no-case "^2.2.0" - upper-case "^1.1.1" - -camelcase@^5.0.0: - version "5.3.1" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" - integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== - -caniuse-api@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/caniuse-api/-/caniuse-api-3.0.0.tgz#5e4d90e2274961d46291997df599e3ed008ee4c0" - integrity sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw== - dependencies: - browserslist "^4.0.0" - caniuse-lite "^1.0.0" - lodash.memoize "^4.1.2" - lodash.uniq "^4.5.0" - -caniuse-lite@^1.0.0, caniuse-lite@^1.0.30000957, caniuse-lite@^1.0.30000967: - version "1.0.30000971" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30000971.tgz#d1000e4546486a6977756547352bc96a4cfd2b13" - integrity sha512-TQFYFhRS0O5rdsmSbF1Wn+16latXYsQJat66f7S7lizXW1PVpWJeZw9wqqVLIjuxDRz7s7xRUj13QCfd8hKn6g== - -case-sensitive-paths-webpack-plugin@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.2.0.tgz#3371ef6365ef9c25fa4b81c16ace0e9c7dc58c3e" - integrity sha512-u5ElzokS8A1pm9vM3/iDgTcI3xqHxuCao94Oz8etI3cf0Tio0p8izkDYbTIn09uP3yUUr6+veaE6IkjnTYS46g== - -caseless@~0.12.0: - version "0.12.0" - resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" - integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= - -chalk@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" - integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= - dependencies: - ansi-styles "^2.2.1" - escape-string-regexp "^1.0.2" - has-ansi "^2.0.0" - strip-ansi "^3.0.0" - supports-color "^2.0.0" - -chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.3.0, chalk@^2.4.1, chalk@^2.4.2: - version "2.4.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== - dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" - -chardet@^0.4.0: - version "0.4.2" - resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.4.2.tgz#b5473b33dc97c424e5d98dc87d55d4d8a29c8bf2" - integrity sha1-tUc7M9yXxCTl2Y3IfVXU2KKci/I= - -check-types@^7.3.0: - version "7.4.0" - resolved "https://registry.yarnpkg.com/check-types/-/check-types-7.4.0.tgz#0378ec1b9616ec71f774931a3c6516fad8c152f4" - integrity sha512-YbulWHdfP99UfZ73NcUDlNJhEIDgm9Doq9GhpyXbF+7Aegi3CVV7qqMCKTTqJxlvEvnQBp9IA+dxsGN6xK/nSg== - -chokidar@^2.0.2, chokidar@^2.1.6: - version "2.1.6" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.6.tgz#b6cad653a929e244ce8a834244164d241fa954c5" - integrity sha512-V2jUo67OKkc6ySiRpJrjlpJKl9kDuG+Xb8VgsGzb+aEouhgS1D0weyPU4lEzdAcsCAvrih2J2BqyXqHWvVLw5g== - dependencies: - anymatch "^2.0.0" - async-each "^1.0.1" - braces "^2.3.2" - glob-parent "^3.1.0" - inherits "^2.0.3" - is-binary-path "^1.0.0" - is-glob "^4.0.0" - normalize-path "^3.0.0" - path-is-absolute "^1.0.0" - readdirp "^2.2.1" - upath "^1.1.1" - optionalDependencies: - fsevents "^1.2.7" - -chownr@^1.0.1, chownr@^1.1.1: - 
version "1.1.1" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.1.tgz#54726b8b8fff4df053c42187e801fb4412df1494" - integrity sha512-j38EvO5+LHX84jlo6h4UzmOwi0UgW61WRyPtJz4qaadK5eY3BTS5TY/S1Stc3Uk2lIM6TPevAlULiEJwie860g== - -chrome-trace-event@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.0.tgz#45a91bd2c20c9411f0963b5aaeb9a1b95e09cc48" - integrity sha512-xDbVgyfDTT2piup/h8dK/y4QZfJRSa73bw1WZ8b4XM1o7fsFubUVGYcE+1ANtOzJJELGpYoG2961z0Z6OAld9A== - dependencies: - tslib "^1.9.0" - -ci-info@^1.5.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-1.6.0.tgz#2ca20dbb9ceb32d4524a683303313f0304b1e497" - integrity sha512-vsGdkwSCDpWmP80ncATX7iea5DWQemg1UgCW5J8tqjU3lYw4FBYuj89J0CTVomA7BEfvSZd84GmHko+MxFQU2A== - -cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" - integrity sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== - dependencies: - inherits "^2.0.1" - safe-buffer "^5.0.1" - -circular-json@^0.3.1: - version "0.3.3" - resolved "https://registry.yarnpkg.com/circular-json/-/circular-json-0.3.3.tgz#815c99ea84f6809529d2f45791bdf82711352d66" - integrity sha512-UZK3NBx2Mca+b5LsG7bY183pHWt5Y1xts4P3Pz7ENTwGVnJOUWbRb3ocjvX7hx9tq/yTAdclXm9sZ38gNuem4A== - -class-utils@^0.3.5: - version "0.3.6" - resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" - integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== - dependencies: - arr-union "^3.1.0" - define-property "^0.2.5" - isobject "^3.0.0" - static-extend "^0.1.1" - -clean-css@4.2.x: - version "4.2.1" - resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-4.2.1.tgz#2d411ef76b8569b6d0c84068dabe85b0aa5e5c17" - integrity sha512-4ZxI6dy4lrY6FHzfiy1aEOXgu4LIsW2MhwG0VBKdcoGoH/XLFgaHSdLTGr4O8Be6A8r3MOphEiI8Gc1n0ecf3g== - dependencies: - source-map "~0.6.0" - -cli-cursor@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" - integrity sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU= - dependencies: - restore-cursor "^2.0.0" - -cli-highlight@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/cli-highlight/-/cli-highlight-2.1.1.tgz#2180223d51618b112f4509cf96e4a6c750b07e97" - integrity sha512-0y0VlNmdD99GXZHYnvrQcmHxP8Bi6T00qucGgBgGv4kJ0RyDthNnnFPupHV7PYv/OXSVk+azFbOeaW6+vGmx9A== - dependencies: - chalk "^2.3.0" - highlight.js "^9.6.0" - mz "^2.4.0" - parse5 "^4.0.0" - yargs "^13.0.0" - -cli-spinners@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.1.0.tgz#22c34b4d51f573240885b201efda4e4ec9fff3c7" - integrity sha512-8B00fJOEh1HPrx4fo5eW16XmE1PcL1tGpGrxy63CXGP9nHdPBN63X75hA1zhvQuhVztJWLqV58Roj2qlNM7cAA== - -cli-width@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.0.tgz#ff19ede8a9a5e579324147b0c11f0fbcbabed639" - integrity sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk= - -clipboard@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/clipboard/-/clipboard-2.0.4.tgz#836dafd66cf0fea5d71ce5d5b0bf6e958009112d" - integrity sha512-Vw26VSLRpJfBofiVaFb/I8PVfdI1OxKcYShe6fm0sP/DtmiWQNCjhM/okTvdCo0G+lMMm1rMYbk4IK4x1X+kgQ== - dependencies: - good-listener "^1.2.2" - select "^1.1.2" - 
tiny-emitter "^2.0.0" - -clipboardy@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/clipboardy/-/clipboardy-2.0.0.tgz#3fcee421fdeca4e6a62ce72b66f3eb0c42165acd" - integrity sha512-XbVjHMsss0giNUkp/tV/3eEAZe8i1fZTLzmPKqjE1RGIAWOTiF5D014f6R+g53ZAq0IK3cPrJXFvqE8eQjhFYQ== - dependencies: - arch "^2.1.1" - execa "^1.0.0" - -cliui@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-4.1.0.tgz#348422dbe82d800b3022eef4f6ac10bf2e4d1b49" - integrity sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ== - dependencies: - string-width "^2.1.1" - strip-ansi "^4.0.0" - wrap-ansi "^2.0.0" - -cliui@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5" - integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA== - dependencies: - string-width "^3.1.0" - strip-ansi "^5.2.0" - wrap-ansi "^5.1.0" - -clone-response@1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.2.tgz#d1dc973920314df67fbeb94223b4ee350239e96b" - integrity sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws= - dependencies: - mimic-response "^1.0.0" - -clone@^1.0.2: - version "1.0.4" - resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" - integrity sha1-2jCcwmPfFZlMaIypAheco8fNfH4= - -co@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" - integrity sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ= - -coa@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3" - integrity sha512-q5/jG+YQnSy4nRTV4F7lPepBJZ8qBNJJDBuJdoejDyLXgmL7IEo+Le2JDZudFTFt7mrCqIRaSjws4ygRCTCAXA== - dependencies: - "@types/q" "^1.5.1" - chalk "^2.4.1" - q "^1.1.2" - -code-point-at@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" - integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= - -collection-visit@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" - integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= - dependencies: - map-visit "^1.0.0" - object-visit "^1.0.0" - -color-convert@^1.9.0, color-convert@^1.9.1: - version "1.9.3" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" - integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== - dependencies: - color-name "1.1.3" - -color-name@1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= - -color-name@^1.0.0: - version "1.1.4" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" - integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== - -color-string@^1.5.2: - version "1.5.3" - resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.5.3.tgz#c9bbc5f01b58b5492f3d6857459cb6590ce204cc" - integrity sha512-dC2C5qeWoYkxki5UAXapdjqO672AM4vZuPGRQfO8b5HKuKGBbKWpITyDYN7TOFKvRW7kOgAn3746clDBMDJyQw== - dependencies: - color-name "^1.0.0" - 
simple-swizzle "^0.2.2" - -color@^3.0.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/color/-/color-3.1.1.tgz#7abf5c0d38e89378284e873c207ae2172dcc8a61" - integrity sha512-PvUltIXRjehRKPSy89VnDWFKY58xyhTLyxIg21vwQBI6qLwZNPmC8k3C1uytIgFKEpOIzN4y32iPm8231zFHIg== - dependencies: - color-convert "^1.9.1" - color-string "^1.5.2" - -combined-stream@^1.0.6, combined-stream@~1.0.6: - version "1.0.8" - resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" - integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== - dependencies: - delayed-stream "~1.0.0" - -commander@2.17.x: - version "2.17.1" - resolved "https://registry.yarnpkg.com/commander/-/commander-2.17.1.tgz#bd77ab7de6de94205ceacc72f1716d29f20a77bf" - integrity sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg== - -commander@^2.18.0, commander@^2.19.0: - version "2.20.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.0.tgz#d58bb2b5c1ee8f87b0d340027e9e94e222c5a422" - integrity sha512-7j2y+40w61zy6YC2iRNpUe/NwhNyoXrYpHMrSunaMG64nRnaf96zO/KMQR4OyN/UnE5KLyEBnKHd4aG3rskjpQ== - -commander@~2.19.0: - version "2.19.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-2.19.0.tgz#f6198aa84e5b83c46054b94ddedbfed5ee9ff12a" - integrity sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg== - -commondir@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" - integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= - -component-emitter@^1.2.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" - integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== - -compressible@~2.0.16: - version "2.0.17" - resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.17.tgz#6e8c108a16ad58384a977f3a482ca20bff2f38c1" - integrity sha512-BGHeLCK1GV7j1bSmQQAi26X+GgWcTjLr/0tzSvMCl3LH1w1IJ4PFSPoV5316b30cneTziC+B1a+3OjoSUcQYmw== - dependencies: - mime-db ">= 1.40.0 < 2" - -compression@^1.7.4: - version "1.7.4" - resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" - integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== - dependencies: - accepts "~1.3.5" - bytes "3.0.0" - compressible "~2.0.16" - debug "2.6.9" - on-headers "~1.0.2" - safe-buffer "5.1.2" - vary "~1.1.2" - -concat-map@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" - integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= - -concat-stream@^1.5.0, concat-stream@^1.6.0: - version "1.6.2" - resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" - integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== - dependencies: - buffer-from "^1.0.0" - inherits "^2.0.3" - readable-stream "^2.2.2" - typedarray "^0.0.6" - -connect-history-api-fallback@^1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz#8b32089359308d111115d81cad3fceab888f97bc" - integrity 
sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg== - -console-browserify@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.1.0.tgz#f0241c45730a9fc6323b206dbf38edc741d0bb10" - integrity sha1-8CQcRXMKn8YyOyBtvzjtx0HQuxA= - dependencies: - date-now "^0.1.4" - -console-control-strings@^1.0.0, console-control-strings@~1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" - integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4= - -consolidate@^0.15.1: - version "0.15.1" - resolved "https://registry.yarnpkg.com/consolidate/-/consolidate-0.15.1.tgz#21ab043235c71a07d45d9aad98593b0dba56bab7" - integrity sha512-DW46nrsMJgy9kqAbPt5rKaCr7uFtpo4mSUvLHIUbJEjm0vo+aY5QLwBUq3FK4tRnJr/X0Psc0C4jf/h+HtXSMw== - dependencies: - bluebird "^3.1.1" - -constants-browserify@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75" - integrity sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U= - -content-disposition@0.5.3: - version "0.5.3" - resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.3.tgz#e130caf7e7279087c5616c2007d0485698984fbd" - integrity sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g== - dependencies: - safe-buffer "5.1.2" - -content-type@~1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" - integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== - -convert-source-map@^1.1.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.6.0.tgz#51b537a8c43e0f04dec1993bffcdd504e758ac20" - integrity sha512-eFu7XigvxdZ1ETfbgPBohgyQ/Z++C0eEhTor0qRwBw9unw+L0/6V8wkSuGgzdThkiS5lSpdptOQPD8Ak40a+7A== - dependencies: - safe-buffer "~5.1.1" - -cookie-signature@1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" - integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= - -cookie@0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba" - integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg== - -copy-concurrently@^1.0.0: - version "1.0.5" - resolved "https://registry.yarnpkg.com/copy-concurrently/-/copy-concurrently-1.0.5.tgz#92297398cae34937fcafd6ec8139c18051f0b5e0" - integrity sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A== - dependencies: - aproba "^1.1.1" - fs-write-stream-atomic "^1.0.8" - iferr "^0.1.5" - mkdirp "^0.5.1" - rimraf "^2.5.4" - run-queue "^1.0.0" - -copy-descriptor@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" - integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= - -copy-webpack-plugin@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/copy-webpack-plugin/-/copy-webpack-plugin-4.6.0.tgz#e7f40dd8a68477d405dd1b7a854aae324b158bae" - integrity sha512-Y+SQCF+0NoWQryez2zXn5J5knmr9z/9qSQt7fbL78u83rxmigOy8X5+BFn8CFSuX+nKT8gpYwJX68ekqtQt6ZA== - dependencies: - cacache "^10.0.4" - 
find-cache-dir "^1.0.0" - globby "^7.1.1" - is-glob "^4.0.0" - loader-utils "^1.1.0" - minimatch "^3.0.4" - p-limit "^1.0.0" - serialize-javascript "^1.4.0" - -core-js@^2.6.5: - version "2.6.8" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.8.tgz#dc3a1e633a04267944e0cb850d3880f340248139" - integrity sha512-RWlREFU74TEkdXzyl1bka66O3kYp8jeTXrvJZDzVVMH8AiHUSOFpL1yfhQJ+wHocAm1m+4971W1PPzfLuCv1vg== - -core-util-is@1.0.2, core-util-is@~1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" - integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= - -cosmiconfig@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-4.0.0.tgz#760391549580bbd2df1e562bc177b13c290972dc" - integrity sha512-6e5vDdrXZD+t5v0L8CrurPeybg4Fmf+FCSYxXKYVAqLUtyCSbuyqE059d0kDthTNRzKVjL7QMgNpEUlsoYH3iQ== - dependencies: - is-directory "^0.3.1" - js-yaml "^3.9.0" - parse-json "^4.0.0" - require-from-string "^2.0.1" - -cosmiconfig@^5.0.0: - version "5.2.1" - resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-5.2.1.tgz#040f726809c591e77a17c0a3626ca45b4f168b1a" - integrity sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA== - dependencies: - import-fresh "^2.0.0" - is-directory "^0.3.1" - js-yaml "^3.13.1" - parse-json "^4.0.0" - -create-ecdh@^4.0.0: - version "4.0.3" - resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.3.tgz#c9111b6f33045c4697f144787f9254cdc77c45ff" - integrity sha512-GbEHQPMOswGpKXM9kCWVrremUcBmjteUaQ01T9rkKCPDXfUHX0IoP9LpHYo2NPFampa4e+/pFDc3jQdxrxQLaw== - dependencies: - bn.js "^4.1.0" - elliptic "^6.0.0" - -create-hash@^1.1.0, create-hash@^1.1.2: - version "1.2.0" - resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" - integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== - dependencies: - cipher-base "^1.0.1" - inherits "^2.0.1" - md5.js "^1.3.4" - ripemd160 "^2.0.1" - sha.js "^2.4.0" - -create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4: - version "1.1.7" - resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" - integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== - dependencies: - cipher-base "^1.0.3" - create-hash "^1.1.0" - inherits "^2.0.1" - ripemd160 "^2.0.0" - safe-buffer "^5.0.1" - sha.js "^2.4.8" - -cross-spawn@^5.0.1, cross-spawn@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449" - integrity sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk= - dependencies: - lru-cache "^4.0.1" - shebang-command "^1.2.0" - which "^1.2.9" - -cross-spawn@^6.0.0: - version "6.0.5" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" - integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== - dependencies: - nice-try "^1.0.4" - path-key "^2.0.1" - semver "^5.5.0" - shebang-command "^1.2.0" - which "^1.2.9" - -crypto-browserify@^3.11.0: - version "3.12.0" - resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec" - integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg== 
- dependencies: - browserify-cipher "^1.0.0" - browserify-sign "^4.0.0" - create-ecdh "^4.0.0" - create-hash "^1.1.0" - create-hmac "^1.1.0" - diffie-hellman "^5.0.0" - inherits "^2.0.1" - pbkdf2 "^3.0.3" - public-encrypt "^4.0.0" - randombytes "^2.0.0" - randomfill "^1.0.3" - -css-color-names@0.0.4, css-color-names@^0.0.4: - version "0.0.4" - resolved "https://registry.yarnpkg.com/css-color-names/-/css-color-names-0.0.4.tgz#808adc2e79cf84738069b646cb20ec27beb629e0" - integrity sha1-gIrcLnnPhHOAabZGyyDsJ762KeA= - -css-declaration-sorter@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-4.0.1.tgz#c198940f63a76d7e36c1e71018b001721054cb22" - integrity sha512-BcxQSKTSEEQUftYpBVnsH4SF05NTuBokb19/sBt6asXGKZ/6VP7PLG1CBCkFDYOnhXhPh0jMhO6xZ71oYHXHBA== - dependencies: - postcss "^7.0.1" - timsort "^0.3.0" - -css-loader@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-1.0.1.tgz#6885bb5233b35ec47b006057da01cc640b6b79fe" - integrity sha512-+ZHAZm/yqvJ2kDtPne3uX0C+Vr3Zn5jFn2N4HywtS5ujwvsVkyg0VArEXpl3BgczDA8anieki1FIzhchX4yrDw== - dependencies: - babel-code-frame "^6.26.0" - css-selector-tokenizer "^0.7.0" - icss-utils "^2.1.0" - loader-utils "^1.0.2" - lodash "^4.17.11" - postcss "^6.0.23" - postcss-modules-extract-imports "^1.2.0" - postcss-modules-local-by-default "^1.2.0" - postcss-modules-scope "^1.1.0" - postcss-modules-values "^1.3.0" - postcss-value-parser "^3.3.0" - source-list-map "^2.0.0" - -css-select-base-adapter@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz#3b2ff4972cc362ab88561507a95408a1432135d7" - integrity sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w== - -css-select@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/css-select/-/css-select-1.2.0.tgz#2b3a110539c5355f1cd8d314623e870b121ec858" - integrity sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg= - dependencies: - boolbase "~1.0.0" - css-what "2.1" - domutils "1.5.1" - nth-check "~1.0.1" - -css-select@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/css-select/-/css-select-2.0.2.tgz#ab4386cec9e1f668855564b17c3733b43b2a5ede" - integrity sha512-dSpYaDVoWaELjvZ3mS6IKZM/y2PMPa/XYoEfYNZePL4U/XgyxZNroHEHReDx/d+VgXh9VbCTtFqLkFbmeqeaRQ== - dependencies: - boolbase "^1.0.0" - css-what "^2.1.2" - domutils "^1.7.0" - nth-check "^1.0.2" - -css-selector-tokenizer@^0.7.0: - version "0.7.1" - resolved "https://registry.yarnpkg.com/css-selector-tokenizer/-/css-selector-tokenizer-0.7.1.tgz#a177271a8bca5019172f4f891fc6eed9cbf68d5d" - integrity sha512-xYL0AMZJ4gFzJQsHUKa5jiWWi2vH77WVNg7JYRyewwj6oPh4yb/y6Y9ZCw9dsj/9UauMhtuxR+ogQd//EdEVNA== - dependencies: - cssesc "^0.1.0" - fastparse "^1.1.1" - regexpu-core "^1.0.0" - -css-tree@1.0.0-alpha.28: - version "1.0.0-alpha.28" - resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.0.0-alpha.28.tgz#8e8968190d886c9477bc8d61e96f61af3f7ffa7f" - integrity sha512-joNNW1gCp3qFFzj4St6zk+Wh/NBv0vM5YbEreZk0SD4S23S+1xBKb6cLDg2uj4P4k/GUMlIm6cKIDqIG+vdt0w== - dependencies: - mdn-data "~1.1.0" - source-map "^0.5.3" - -css-tree@1.0.0-alpha.29: - version "1.0.0-alpha.29" - resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.0.0-alpha.29.tgz#3fa9d4ef3142cbd1c301e7664c1f352bd82f5a39" - integrity sha512-sRNb1XydwkW9IOci6iB2xmy8IGCj6r/fr+JWitvJ2JxQRPzN3T4AGGVWCMlVmVwM1gtgALJRmGIlWv5ppnGGkg== - dependencies: - mdn-data "~1.1.0" - source-map "^0.5.3" - 
-css-unit-converter@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/css-unit-converter/-/css-unit-converter-1.1.1.tgz#d9b9281adcfd8ced935bdbaba83786897f64e996" - integrity sha1-2bkoGtz9jO2TW9urqDeGiX9k6ZY= - -css-url-regex@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/css-url-regex/-/css-url-regex-1.1.0.tgz#83834230cc9f74c457de59eebd1543feeb83b7ec" - integrity sha1-g4NCMMyfdMRX3lnuvRVD/uuDt+w= - -css-what@2.1, css-what@^2.1.2: - version "2.1.3" - resolved "https://registry.yarnpkg.com/css-what/-/css-what-2.1.3.tgz#a6d7604573365fe74686c3f311c56513d88285f2" - integrity sha512-a+EPoD+uZiNfh+5fxw2nO9QwFa6nJe2Or35fGY6Ipw1R3R4AGz1d1TEZrCegvw2YTmZ0jXirGYlzxxpYSHwpEg== - -cssesc@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-0.1.0.tgz#c814903e45623371a0477b40109aaafbeeaddbb4" - integrity sha1-yBSQPkViM3GgR3tAEJqq++6t27Q= - -cssesc@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-2.0.0.tgz#3b13bd1bb1cb36e1bcb5a4dcd27f54c5dcb35703" - integrity sha512-MsCAG1z9lPdoO/IUMLSBWBSVxVtJ1395VGIQ+Fc2gNdkQ1hNDnQdw3YhA71WJCBW1vdwA0cAnk/DnW6bqoEUYg== - -cssnano-preset-default@^4.0.0, cssnano-preset-default@^4.0.7: - version "4.0.7" - resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-4.0.7.tgz#51ec662ccfca0f88b396dcd9679cdb931be17f76" - integrity sha512-x0YHHx2h6p0fCl1zY9L9roD7rnlltugGu7zXSKQx6k2rYw0Hi3IqxcoAGF7u9Q5w1nt7vK0ulxV8Lo+EvllGsA== - dependencies: - css-declaration-sorter "^4.0.1" - cssnano-util-raw-cache "^4.0.1" - postcss "^7.0.0" - postcss-calc "^7.0.1" - postcss-colormin "^4.0.3" - postcss-convert-values "^4.0.1" - postcss-discard-comments "^4.0.2" - postcss-discard-duplicates "^4.0.2" - postcss-discard-empty "^4.0.1" - postcss-discard-overridden "^4.0.1" - postcss-merge-longhand "^4.0.11" - postcss-merge-rules "^4.0.3" - postcss-minify-font-values "^4.0.2" - postcss-minify-gradients "^4.0.2" - postcss-minify-params "^4.0.2" - postcss-minify-selectors "^4.0.2" - postcss-normalize-charset "^4.0.1" - postcss-normalize-display-values "^4.0.2" - postcss-normalize-positions "^4.0.2" - postcss-normalize-repeat-style "^4.0.2" - postcss-normalize-string "^4.0.2" - postcss-normalize-timing-functions "^4.0.2" - postcss-normalize-unicode "^4.0.1" - postcss-normalize-url "^4.0.1" - postcss-normalize-whitespace "^4.0.2" - postcss-ordered-values "^4.1.2" - postcss-reduce-initial "^4.0.3" - postcss-reduce-transforms "^4.0.2" - postcss-svgo "^4.0.2" - postcss-unique-selectors "^4.0.1" - -cssnano-util-get-arguments@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/cssnano-util-get-arguments/-/cssnano-util-get-arguments-4.0.0.tgz#ed3a08299f21d75741b20f3b81f194ed49cc150f" - integrity sha1-7ToIKZ8h11dBsg87gfGU7UnMFQ8= - -cssnano-util-get-match@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/cssnano-util-get-match/-/cssnano-util-get-match-4.0.0.tgz#c0e4ca07f5386bb17ec5e52250b4f5961365156d" - integrity sha1-wOTKB/U4a7F+xeUiULT1lhNlFW0= - -cssnano-util-raw-cache@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/cssnano-util-raw-cache/-/cssnano-util-raw-cache-4.0.1.tgz#b26d5fd5f72a11dfe7a7846fb4c67260f96bf282" - integrity sha512-qLuYtWK2b2Dy55I8ZX3ky1Z16WYsx544Q0UWViebptpwn/xDBmog2TLg4f+DBMg1rJ6JDWtn96WHbOKDWt1WQA== - dependencies: - postcss "^7.0.0" - -cssnano-util-same-parent@^4.0.0: - version "4.0.1" - resolved 
"https://registry.yarnpkg.com/cssnano-util-same-parent/-/cssnano-util-same-parent-4.0.1.tgz#574082fb2859d2db433855835d9a8456ea18bbf3" - integrity sha512-WcKx5OY+KoSIAxBW6UBBRay1U6vkYheCdjyVNDm85zt5K9mHoGOfsOsqIszfAqrQQFIIKgjh2+FDgIj/zsl21Q== - -cssnano@^4.0.0, cssnano@^4.1.10: - version "4.1.10" - resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-4.1.10.tgz#0ac41f0b13d13d465487e111b778d42da631b8b2" - integrity sha512-5wny+F6H4/8RgNlaqab4ktc3e0/blKutmq8yNlBFXA//nSFFAqAngjNVRzUvCgYROULmZZUoosL/KSoZo5aUaQ== - dependencies: - cosmiconfig "^5.0.0" - cssnano-preset-default "^4.0.7" - is-resolvable "^1.0.0" - postcss "^7.0.0" - -csso@^3.5.1: - version "3.5.1" - resolved "https://registry.yarnpkg.com/csso/-/csso-3.5.1.tgz#7b9eb8be61628973c1b261e169d2f024008e758b" - integrity sha512-vrqULLffYU1Q2tLdJvaCYbONStnfkfimRxXNaGjxMldI0C7JPBC4rB1RyjhfdZ4m1frm8pM9uRPKH3d2knZ8gg== - dependencies: - css-tree "1.0.0-alpha.29" - -current-script-polyfill@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/current-script-polyfill/-/current-script-polyfill-1.0.0.tgz#f31cf7e4f3e218b0726e738ca92a02d3488ef615" - integrity sha1-8xz35PPiGLBybnOMqSoC00iO9hU= - -cyclist@~0.2.2: - version "0.2.2" - resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-0.2.2.tgz#1b33792e11e914a2fd6d6ed6447464444e5fa640" - integrity sha1-GzN5LhHpFKL9bW7WRHRkRE5fpkA= - -dashdash@^1.12.0: - version "1.14.1" - resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" - integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= - dependencies: - assert-plus "^1.0.0" - -date-now@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/date-now/-/date-now-0.1.4.tgz#eaf439fd4d4848ad74e5cc7dbef200672b9e345b" - integrity sha1-6vQ5/U1ISK105cx9vvIAZyueNFs= - -de-indent@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/de-indent/-/de-indent-1.0.2.tgz#b2038e846dc33baa5796128d0804b455b8c1e21d" - integrity sha1-sgOOhG3DO6pXlhKNCAS0VbjB4h0= - -debug@2.6.9, debug@^2.2.0, debug@^2.3.3: - version "2.6.9" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" - integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== - dependencies: - ms "2.0.0" - -debug@^3.1.0, debug@^3.2.5, debug@^3.2.6: - version "3.2.6" - resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" - integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== - dependencies: - ms "^2.1.1" - -debug@^4.1.0, debug@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" - integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== - dependencies: - ms "^2.1.1" - -decamelize@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" - integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= - -decode-uri-component@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" - integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= - -decompress-response@^3.3.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-3.3.0.tgz#80a4dd323748384bfa248083622aedec982adff3" - integrity 
sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M= - dependencies: - mimic-response "^1.0.0" - -deep-equal@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.0.1.tgz#f5d260292b660e084eff4cdbc9f08ad3247448b5" - integrity sha1-9dJgKStmDghO/0zbyfCK0yR0SLU= - -deep-extend@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" - integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== - -deep-is@~0.1.3: - version "0.1.3" - resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" - integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= - -deepmerge@^1.5.2: - version "1.5.2" - resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-1.5.2.tgz#10499d868844cdad4fee0842df8c7f6f0c95a753" - integrity sha512-95k0GDqvBjZavkuvzx/YqVLv/6YYa17fz6ILMSf7neqQITCPbnfEnQvEgMPNjH4kgobe7+WIL0yJEHku+H3qtQ== - -default-gateway@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-4.2.0.tgz#167104c7500c2115f6dd69b0a536bb8ed720552b" - integrity sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA== - dependencies: - execa "^1.0.0" - ip-regex "^2.1.0" - -defaults@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.3.tgz#c656051e9817d9ff08ed881477f3fe4019f3ef7d" - integrity sha1-xlYFHpgX2f8I7YgUd/P+QBnz730= - dependencies: - clone "^1.0.2" - -define-properties@^1.1.2, define-properties@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" - integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== - dependencies: - object-keys "^1.0.12" - -define-property@^0.2.5: - version "0.2.5" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" - integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= - dependencies: - is-descriptor "^0.1.0" - -define-property@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" - integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= - dependencies: - is-descriptor "^1.0.0" - -define-property@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" - integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== - dependencies: - is-descriptor "^1.0.2" - isobject "^3.0.1" - -del@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/del/-/del-4.1.1.tgz#9e8f117222ea44a31ff3a156c049b99052a9f0b4" - integrity sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ== - dependencies: - "@types/glob" "^7.1.1" - globby "^6.1.0" - is-path-cwd "^2.0.0" - is-path-in-cwd "^2.0.0" - p-map "^2.0.0" - pify "^4.0.1" - rimraf "^2.6.3" - -delayed-stream@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" - integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= - -delegate@^3.1.2: - version "3.2.0" - resolved "https://registry.yarnpkg.com/delegate/-/delegate-3.2.0.tgz#b66b71c3158522e8ab5744f720d8ca0c2af59166" - 
integrity sha512-IofjkYBZaZivn0V8nnsMJGBr4jVLxHDheKSW88PyxS5QC4Vo9ZbZVvhzlSxY87fVq3STR6r+4cGepyHkcWOQSw== - -delegates@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" - integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= - -depd@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" - integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= - -des.js@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.0.tgz#c074d2e2aa6a8a9a07dbd61f9a15c2cd83ec8ecc" - integrity sha1-wHTS4qpqipoH29YfmhXCzYPsjsw= - dependencies: - inherits "^2.0.1" - minimalistic-assert "^1.0.0" - -destroy@~1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" - integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= - -detect-libc@^1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" - integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= - -detect-node@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.0.4.tgz#014ee8f8f669c5c58023da64b8179c083a28c46c" - integrity sha512-ZIzRpLJrOj7jjP2miAtgqIfmzbxa4ZOr5jJc601zklsfEx9oTzmmj2nVpIPRpNlRTIh8lc1kyViIY7BWSGNmKw== - -diffie-hellman@^5.0.0: - version "5.0.3" - resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" - integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg== - dependencies: - bn.js "^4.1.0" - miller-rabin "^4.0.0" - randombytes "^2.0.0" - -dir-glob@^2.0.0, dir-glob@^2.2.2: - version "2.2.2" - resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-2.2.2.tgz#fa09f0694153c8918b18ba0deafae94769fc50c4" - integrity sha512-f9LBi5QWzIW3I6e//uxZoLBlUt9kcp66qo0sSCxL6YZKc75R1c4MFCoe/LaZiBGmgujvQdxc5Bn3QhfyvK5Hsw== - dependencies: - path-type "^3.0.0" - -dns-equal@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" - integrity sha1-s55/HabrCnW6nBcySzR1PEfgZU0= - -dns-packet@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-1.3.1.tgz#12aa426981075be500b910eedcd0b47dd7deda5a" - integrity sha512-0UxfQkMhYAUaZI+xrNZOz/as5KgDU0M/fQ9b6SpkyLbk3GEswDi6PADJVaYJradtRVsRIlF1zLyOodbcTCDzUg== - dependencies: - ip "^1.1.0" - safe-buffer "^5.0.1" - -dns-txt@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/dns-txt/-/dns-txt-2.0.2.tgz#b91d806f5d27188e4ab3e7d107d881a1cc4642b6" - integrity sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY= - dependencies: - buffer-indexof "^1.0.0" - -doctrine@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" - integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== - dependencies: - esutils "^2.0.2" - -dom-converter@^0.2: - version "0.2.0" - resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" - integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== - dependencies: - utila "~0.4" - -dom-serializer@0: - version "0.1.1" - resolved 
"https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.1.1.tgz#1ec4059e284babed36eec2941d4a970a189ce7c0" - integrity sha512-l0IU0pPzLWSHBcieZbpOKgkIn3ts3vAh7ZuFyXNwJxJXk/c4Gwj9xaTJwIDVQCXawWD0qb3IzMGH5rglQaO0XA== - dependencies: - domelementtype "^1.3.0" - entities "^1.1.1" - -domain-browser@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.2.0.tgz#3d31f50191a6749dd1375a7f522e823d42e54eda" - integrity sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA== - -domelementtype@1, domelementtype@^1.3.0, domelementtype@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" - integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== - -domhandler@^2.3.0: - version "2.4.2" - resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-2.4.2.tgz#8805097e933d65e85546f726d60f5eb88b44f803" - integrity sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA== - dependencies: - domelementtype "1" - -domutils@1.5.1: - version "1.5.1" - resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.5.1.tgz#dcd8488a26f563d61079e48c9f7b7e32373682cf" - integrity sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8= - dependencies: - dom-serializer "0" - domelementtype "1" - -domutils@^1.5.1, domutils@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" - integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== - dependencies: - dom-serializer "0" - domelementtype "1" - -dot-prop@^4.1.1: - version "4.2.0" - resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-4.2.0.tgz#1f19e0c2e1aa0e32797c49799f2837ac6af69c57" - integrity sha512-tUMXrxlExSW6U2EXiiKGSBVdYgtV8qlHL+C10TsW4PURY/ic+eaysnSkwB4kA/mBlCyy/IKDJ+Lc3wbWeaXtuQ== - dependencies: - is-obj "^1.0.0" - -dotenv-expand@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/dotenv-expand/-/dotenv-expand-5.1.0.tgz#3fbaf020bfd794884072ea26b1e9791d45a629f0" - integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA== - -dotenv@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-7.0.0.tgz#a2be3cd52736673206e8a85fb5210eea29628e7c" - integrity sha512-M3NhsLbV1i6HuGzBUH8vXrtxOk+tWmzWKDMbAVSUp3Zsjm7ywFeuwrUXhmhQyRK1q5B5GGy7hcXPbj3bnfZg2g== - -duplexer3@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2" - integrity sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI= - -duplexer@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.1.tgz#ace6ff808c1ce66b57d1ebf97977acb02334cfc1" - integrity sha1-rOb/gIwc5mtX0ev5eXessCM0z8E= - -duplexify@^3.4.2, duplexify@^3.6.0: - version "3.7.1" - resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" - integrity sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g== - dependencies: - end-of-stream "^1.0.0" - inherits "^2.0.1" - readable-stream "^2.0.0" - stream-shift "^1.0.0" - -easy-stack@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/easy-stack/-/easy-stack-1.0.0.tgz#12c91b3085a37f0baa336e9486eac4bf94e3e788" - integrity 
sha1-EskbMIWjfwuqM26UhurEv5Tj54g= - -ecc-jsbn@~0.1.1: - version "0.1.2" - resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" - integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= - dependencies: - jsbn "~0.1.0" - safer-buffer "^2.1.0" - -ee-first@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" - integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= - -ejs@^2.6.1: - version "2.6.1" - resolved "https://registry.yarnpkg.com/ejs/-/ejs-2.6.1.tgz#498ec0d495655abc6f23cd61868d926464071aa0" - integrity sha512-0xy4A/twfrRCnkhfk8ErDi5DqdAsAqeGxht4xkCUrsvhhbQNs7E+4jV0CN7+NKIY0aHE72+XvqtBIXzD31ZbXQ== - -electron-to-chromium@^1.3.133: - version "1.3.137" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.137.tgz#ba7c88024984c038a5c5c434529aabcea7b42944" - integrity sha512-kGi32g42a8vS/WnYE7ELJyejRT7hbr3UeOOu0WeuYuQ29gCpg9Lrf6RdcTQVXSt/v0bjCfnlb/EWOOsiKpTmkw== - -elliptic@^6.0.0: - version "6.4.1" - resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.4.1.tgz#c2d0b7776911b86722c632c3c06c60f2f819939a" - integrity sha512-BsXLz5sqX8OHcsh7CqBMztyXARmGQ3LWPtGjJi6DiJHq5C/qvi9P3OqgswKSDftbu8+IoI/QDTAm2fFnQ9SZSQ== - dependencies: - bn.js "^4.4.0" - brorand "^1.0.1" - hash.js "^1.0.0" - hmac-drbg "^1.0.0" - inherits "^2.0.1" - minimalistic-assert "^1.0.0" - minimalistic-crypto-utils "^1.0.0" - -emoji-regex@^7.0.1: - version "7.0.3" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" - integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== - -emojis-list@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389" - integrity sha1-TapNnbAPmBmIDHn6RXrlsJof04k= - -encodeurl@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" - integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= - -end-of-stream@^1.0.0, end-of-stream@^1.1.0: - version "1.4.1" - resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.1.tgz#ed29634d19baba463b6ce6b80a37213eab71ec43" - integrity sha512-1MkrZNvWTKCaigbn+W15elq2BB/L22nqrSY5DKlo3X6+vclJm8Bb5djXJBmEX6fS3+zCh/F4VBK5Z2KxJt4s2Q== - dependencies: - once "^1.4.0" - -enhanced-resolve@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.0.tgz#41c7e0bfdfe74ac1ffe1e57ad6a5c6c9f3742a7f" - integrity sha512-F/7vkyTtyc/llOIn8oWclcB25KdRaiPBpZYDgJHgh/UHtpgT2p2eldQgtQnLtUvfMKPKxbRaQM/hHkvLHt1Vng== - dependencies: - graceful-fs "^4.1.2" - memory-fs "^0.4.0" - tapable "^1.0.0" - -entities@^1.1.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56" - integrity sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w== - -errno@^0.1.3, errno@~0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.7.tgz#4684d71779ad39af177e3f007996f7c67c852618" - integrity sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg== - dependencies: - prr "~1.0.1" - -error-ex@^1.3.1: - version "1.3.2" - resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" - integrity 
sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== - dependencies: - is-arrayish "^0.2.1" - -error-stack-parser@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/error-stack-parser/-/error-stack-parser-2.0.2.tgz#4ae8dbaa2bf90a8b450707b9149dcabca135520d" - integrity sha512-E1fPutRDdIj/hohG0UpT5mayXNCxXP9d+snxFsPU9X0XgccOumKraa3juDMwTUyi7+Bu5+mCGagjg4IYeNbOdw== - dependencies: - stackframe "^1.0.4" - -es-abstract@^1.12.0, es-abstract@^1.4.3, es-abstract@^1.5.1: - version "1.13.0" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.13.0.tgz#ac86145fdd5099d8dd49558ccba2eaf9b88e24e9" - integrity sha512-vDZfg/ykNxQVwup/8E1BZhVzFfBxs9NqMzGcvIJrqg5k2/5Za2bWo40dK2J1pgLngZ7c+Shh8lwYtLGyrwPutg== - dependencies: - es-to-primitive "^1.2.0" - function-bind "^1.1.1" - has "^1.0.3" - is-callable "^1.1.4" - is-regex "^1.0.4" - object-keys "^1.0.12" - -es-to-primitive@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.0.tgz#edf72478033456e8dda8ef09e00ad9650707f377" - integrity sha512-qZryBOJjV//LaxLTV6UC//WewneB3LcXOL9NP++ozKVXsIIIpm/2c13UDiD9Jp2eThsecw9m3jPqDwTyobcdbg== - dependencies: - is-callable "^1.1.4" - is-date-object "^1.0.1" - is-symbol "^1.0.2" - -escape-html@~1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" - integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= - -escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" - integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= - -eslint-loader@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/eslint-loader/-/eslint-loader-2.1.2.tgz#453542a1230d6ffac90e4e7cb9cadba9d851be68" - integrity sha512-rA9XiXEOilLYPOIInvVH5S/hYfyTPyxag6DZhoQOduM+3TkghAEQ3VcFO8VnX4J4qg/UIBzp72aOf/xvYmpmsg== - dependencies: - loader-fs-cache "^1.0.0" - loader-utils "^1.0.2" - object-assign "^4.0.1" - object-hash "^1.1.4" - rimraf "^2.6.1" - -eslint-plugin-vue@^4.7.1: - version "4.7.1" - resolved "https://registry.yarnpkg.com/eslint-plugin-vue/-/eslint-plugin-vue-4.7.1.tgz#c829b9fc62582c1897b5a0b94afd44ecca511e63" - integrity sha512-esETKhVMI7Vdli70Wt4bvAwnZBJeM0pxVX9Yb0wWKxdCJc2EADalVYK/q2FzMw8oKN0wPMdqVCKS8kmR89recA== - dependencies: - vue-eslint-parser "^2.0.3" - -eslint-scope@3.7.1: - version "3.7.1" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-3.7.1.tgz#3d63c3edfda02e06e01a452ad88caacc7cdcb6e8" - integrity sha1-PWPD7f2gLgbgGkUq2IyqzHzctug= - dependencies: - esrecurse "^4.1.0" - estraverse "^4.1.1" - -eslint-scope@^3.7.1: - version "3.7.3" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-3.7.3.tgz#bb507200d3d17f60247636160b4826284b108535" - integrity sha512-W+B0SvF4gamyCTmUc+uITPY0989iXVfKvhwtmJocTaYoc/3khEHmEmvfY/Gn9HA9VV75jrQECsHizkNw1b68FA== - dependencies: - esrecurse "^4.1.0" - estraverse "^4.1.1" - -eslint-scope@^4.0.0: - version "4.0.3" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848" - integrity sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg== - dependencies: - esrecurse "^4.1.0" - estraverse "^4.1.1" - -eslint-visitor-keys@^1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz#3f3180fb2e291017716acb4c9d6d5b5c34a6a81d" - integrity sha512-qzm/XxIbxm/FHyH341ZrbnMUpe+5Bocte9xkmFMzPMjRaZMcXww+MpBptFvtU+79L362nqiLhekCxCxDPaUMBQ== - -eslint@^4.19.1: - version "4.19.1" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-4.19.1.tgz#32d1d653e1d90408854bfb296f076ec7e186a300" - integrity sha512-bT3/1x1EbZB7phzYu7vCr1v3ONuzDtX8WjuM9c0iYxe+cq+pwcKEoQjl7zd3RpC6YOLgnSy3cTN58M2jcoPDIQ== - dependencies: - ajv "^5.3.0" - babel-code-frame "^6.22.0" - chalk "^2.1.0" - concat-stream "^1.6.0" - cross-spawn "^5.1.0" - debug "^3.1.0" - doctrine "^2.1.0" - eslint-scope "^3.7.1" - eslint-visitor-keys "^1.0.0" - espree "^3.5.4" - esquery "^1.0.0" - esutils "^2.0.2" - file-entry-cache "^2.0.0" - functional-red-black-tree "^1.0.1" - glob "^7.1.2" - globals "^11.0.1" - ignore "^3.3.3" - imurmurhash "^0.1.4" - inquirer "^3.0.6" - is-resolvable "^1.0.0" - js-yaml "^3.9.1" - json-stable-stringify-without-jsonify "^1.0.1" - levn "^0.3.0" - lodash "^4.17.4" - minimatch "^3.0.2" - mkdirp "^0.5.1" - natural-compare "^1.4.0" - optionator "^0.8.2" - path-is-inside "^1.0.2" - pluralize "^7.0.0" - progress "^2.0.0" - regexpp "^1.0.1" - require-uncached "^1.0.3" - semver "^5.3.0" - strip-ansi "^4.0.0" - strip-json-comments "~2.0.1" - table "4.0.2" - text-table "~0.2.0" - -espree@^3.5.2, espree@^3.5.4: - version "3.5.4" - resolved "https://registry.yarnpkg.com/espree/-/espree-3.5.4.tgz#b0f447187c8a8bed944b815a660bddf5deb5d1a7" - integrity sha512-yAcIQxtmMiB/jL32dzEp2enBeidsB7xWPLNiw3IIkpVds1P+h7qF9YwJq1yUNzp2OKXgAprs4F61ih66UsoD1A== - dependencies: - acorn "^5.5.0" - acorn-jsx "^3.0.0" - -esprima@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" - integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== - -esquery@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.0.1.tgz#406c51658b1f5991a5f9b62b1dc25b00e3e5c708" - integrity sha512-SmiyZ5zIWH9VM+SRUReLS5Q8a7GxtRdxEBVZpm98rJM7Sb+A9DVCndXfkeFUd3byderg+EbDkfnevfCwynWaNA== - dependencies: - estraverse "^4.0.0" - -esrecurse@^4.1.0: - version "4.2.1" - resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.1.tgz#007a3b9fdbc2b3bb87e4879ea19c92fdbd3942cf" - integrity sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ== - dependencies: - estraverse "^4.1.0" - -estraverse@^4.0.0, estraverse@^4.1.0, estraverse@^4.1.1: - version "4.2.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13" - integrity sha1-De4/7TH81GlhjOc0IJn8GvoL2xM= - -esutils@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b" - integrity sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs= - -etag@~1.8.1: - version "1.8.1" - resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" - integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= - -event-pubsub@4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/event-pubsub/-/event-pubsub-4.3.0.tgz#f68d816bc29f1ec02c539dc58c8dd40ce72cb36e" - integrity sha512-z7IyloorXvKbFx9Bpie2+vMJKKx1fH1EN5yiTfp8CiLOTptSYy1g8H4yDpGlEdshL1PBiFtBHepF2cNsqeEeFQ== - -eventemitter3@^3.0.0: - version "3.1.2" - resolved 
"https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-3.1.2.tgz#2d3d48f9c346698fce83a85d7d664e98535df6e7" - integrity sha512-tvtQIeLVHjDkJYnzf2dgVMxfuSGJeM/7UCG17TT4EumTfNtF+0nebF/4zWOIkCreAbtNqhGEboB6BWrwqNaw4Q== - -events@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/events/-/events-3.0.0.tgz#9a0a0dfaf62893d92b875b8f2698ca4114973e88" - integrity sha512-Dc381HFWJzEOhQ+d8pkNon++bk9h6cdAoAj4iE6Q4y6xgTzySWXlKn05/TVNpjnfRqi/X0EpJEJohPjNI3zpVA== - -eventsource@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-1.0.7.tgz#8fbc72c93fcd34088090bc0a4e64f4b5cee6d8d0" - integrity sha512-4Ln17+vVT0k8aWq+t/bF5arcS3EpT9gYtW66EPacdj/mAFevznsnyoHLPy2BA8gbIQeIHoPsvwmfBftfcG//BQ== - dependencies: - original "^1.0.0" - -evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" - integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== - dependencies: - md5.js "^1.3.4" - safe-buffer "^5.1.1" - -execa@^0.8.0: - version "0.8.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-0.8.0.tgz#d8d76bbc1b55217ed190fd6dd49d3c774ecfc8da" - integrity sha1-2NdrvBtVIX7RkP1t1J08d07PyNo= - dependencies: - cross-spawn "^5.0.1" - get-stream "^3.0.0" - is-stream "^1.1.0" - npm-run-path "^2.0.0" - p-finally "^1.0.0" - signal-exit "^3.0.0" - strip-eof "^1.0.0" - -execa@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" - integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== - dependencies: - cross-spawn "^6.0.0" - get-stream "^4.0.0" - is-stream "^1.1.0" - npm-run-path "^2.0.0" - p-finally "^1.0.0" - signal-exit "^3.0.0" - strip-eof "^1.0.0" - -expand-brackets@^2.1.4: - version "2.1.4" - resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" - integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= - dependencies: - debug "^2.3.3" - define-property "^0.2.5" - extend-shallow "^2.0.1" - posix-character-classes "^0.1.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - -express@^4.16.3, express@^4.17.0: - version "4.17.0" - resolved "https://registry.yarnpkg.com/express/-/express-4.17.0.tgz#288af62228a73f4c8ea2990ba3b791bb87cd4438" - integrity sha512-1Z7/t3Z5ZnBG252gKUPyItc4xdeaA0X934ca2ewckAsVsw9EG71i++ZHZPYnus8g/s5Bty8IMpSVEuRkmwwPRQ== - dependencies: - accepts "~1.3.7" - array-flatten "1.1.1" - body-parser "1.19.0" - content-disposition "0.5.3" - content-type "~1.0.4" - cookie "0.4.0" - cookie-signature "1.0.6" - debug "2.6.9" - depd "~1.1.2" - encodeurl "~1.0.2" - escape-html "~1.0.3" - etag "~1.8.1" - finalhandler "~1.1.2" - fresh "0.5.2" - merge-descriptors "1.0.1" - methods "~1.1.2" - on-finished "~2.3.0" - parseurl "~1.3.3" - path-to-regexp "0.1.7" - proxy-addr "~2.0.5" - qs "6.7.0" - range-parser "~1.2.1" - safe-buffer "5.1.2" - send "0.17.1" - serve-static "1.14.1" - setprototypeof "1.1.1" - statuses "~1.5.0" - type-is "~1.6.18" - utils-merge "1.0.1" - vary "~1.1.2" - -extend-shallow@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" - integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= - dependencies: - is-extendable "^0.1.0" - -extend-shallow@^3.0.0, extend-shallow@^3.0.2: - 
version "3.0.2" - resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" - integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= - dependencies: - assign-symbols "^1.0.0" - is-extendable "^1.0.1" - -extend@~3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" - integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== - -external-editor@^2.0.4: - version "2.2.0" - resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-2.2.0.tgz#045511cfd8d133f3846673d1047c154e214ad3d5" - integrity sha512-bSn6gvGxKt+b7+6TKEv1ZycHleA7aHhRHyAqJyp5pbUFuYYNIzpZnQDk7AsYckyWdEnTeAnay0aCy2aV6iTk9A== - dependencies: - chardet "^0.4.0" - iconv-lite "^0.4.17" - tmp "^0.0.33" - -extglob@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" - integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== - dependencies: - array-unique "^0.3.2" - define-property "^1.0.0" - expand-brackets "^2.1.4" - extend-shallow "^2.0.1" - fragment-cache "^0.2.1" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - -extsprintf@1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" - integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= - -extsprintf@^1.2.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" - integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= - -fast-deep-equal@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz#c053477817c86b51daa853c81e059b733d023614" - integrity sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ= - -fast-deep-equal@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz#7b05218ddf9667bf7f370bf7fdb2cb15fdd0aa49" - integrity sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk= - -fast-glob@^2.2.6: - version "2.2.7" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-2.2.7.tgz#6953857c3afa475fff92ee6015d52da70a4cd39d" - integrity sha512-g1KuQwHOZAmOZMuBtHdxDtju+T2RT8jgCC9aANsbpdiDDTSnjgfuVsIBNKbUeJI3oKMRExcfNDtJl4OhbffMsw== - dependencies: - "@mrmlnc/readdir-enhanced" "^2.2.1" - "@nodelib/fs.stat" "^1.1.2" - glob-parent "^3.1.0" - is-glob "^4.0.0" - merge2 "^1.2.3" - micromatch "^3.1.10" - -fast-json-stable-stringify@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz#d5142c0caee6b1189f87d3a76111064f86c8bbf2" - integrity sha1-1RQsDK7msRifh9OnYREGT4bIu/I= - -fast-levenshtein@~2.0.4: - version "2.0.6" - resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" - integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= - -fastparse@^1.1.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/fastparse/-/fastparse-1.1.2.tgz#91728c5a5942eced8531283c79441ee4122c35a9" - integrity sha512-483XLLxTVIwWK3QTrMGRqUfUpoOs/0hbQrl2oz4J0pAcm3A3bu84wxTFqGqkJzewCLdME38xJLJAxBABfQT8sQ== - -faye-websocket@^0.10.0: - version "0.10.0" - resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.10.0.tgz#4e492f8d04dfb6f89003507f6edbf2d501e7c6f4" - integrity sha1-TkkvjQTftviQA1B/btvy1QHnxvQ= - 
dependencies: - websocket-driver ">=0.5.1" - -faye-websocket@~0.11.1: - version "0.11.1" - resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.1.tgz#f0efe18c4f56e4f40afc7e06c719fd5ee6188f38" - integrity sha1-8O/hjE9W5PQK/H4Gxxn9XuYYjzg= - dependencies: - websocket-driver ">=0.5.1" - -figgy-pudding@^3.5.1: - version "3.5.1" - resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.1.tgz#862470112901c727a0e495a80744bd5baa1d6790" - integrity sha512-vNKxJHTEKNThjfrdJwHc7brvM6eVevuO5nTj6ez8ZQ1qbXTvGthucRF7S4vf2cr71QVnT70V34v0S1DyQsti0w== - -figures@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962" - integrity sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI= - dependencies: - escape-string-regexp "^1.0.5" - -file-entry-cache@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-2.0.0.tgz#c392990c3e684783d838b8c84a45d8a048458361" - integrity sha1-w5KZDD5oR4PYOLjISkXYoEhFg2E= - dependencies: - flat-cache "^1.2.1" - object-assign "^4.0.1" - -file-loader@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-3.0.1.tgz#f8e0ba0b599918b51adfe45d66d1e771ad560faa" - integrity sha512-4sNIOXgtH/9WZq4NvlfU3Opn5ynUsqBwSLyM+I7UOwdGigTBYfVVQEwe/msZNX/j4pCJTIM14Fsw66Svo1oVrw== - dependencies: - loader-utils "^1.0.2" - schema-utils "^1.0.0" - -filesize@^3.6.1: - version "3.6.1" - resolved "https://registry.yarnpkg.com/filesize/-/filesize-3.6.1.tgz#090bb3ee01b6f801a8a8be99d31710b3422bb317" - integrity sha512-7KjR1vv6qnicaPMi1iiTcI85CyYwRO/PSFCu6SvqL8jN2Wjt/NIYQTFtFs7fSDCYOstUkEWIQGFUg5YZQfjlcg== - -fill-range@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" - integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= - dependencies: - extend-shallow "^2.0.1" - is-number "^3.0.0" - repeat-string "^1.6.1" - to-regex-range "^2.1.0" - -finalhandler@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" - integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== - dependencies: - debug "2.6.9" - encodeurl "~1.0.2" - escape-html "~1.0.3" - on-finished "~2.3.0" - parseurl "~1.3.3" - statuses "~1.5.0" - unpipe "~1.0.0" - -find-babel-config@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/find-babel-config/-/find-babel-config-1.2.0.tgz#a9b7b317eb5b9860cda9d54740a8c8337a2283a2" - integrity sha512-jB2CHJeqy6a820ssiqwrKMeyC6nNdmrcgkKWJWmpoxpE8RKciYJXCcXRq1h2AzCo5I5BJeN2tkGEO3hLTuePRA== - dependencies: - json5 "^0.5.1" - path-exists "^3.0.0" - -find-cache-dir@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-0.1.1.tgz#c8defae57c8a52a8a784f9e31c57c742e993a0b9" - integrity sha1-yN765XyKUqinhPnjHFfHQumToLk= - dependencies: - commondir "^1.0.1" - mkdirp "^0.5.1" - pkg-dir "^1.0.0" - -find-cache-dir@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-1.0.0.tgz#9288e3e9e3cc3748717d39eade17cf71fc30ee6f" - integrity sha1-kojj6ePMN0hxfTnq3hfPcfww7m8= - dependencies: - commondir "^1.0.1" - make-dir "^1.0.0" - pkg-dir "^2.0.0" - -find-cache-dir@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.1.0.tgz#8d0f94cd13fe43c6c7c261a0d86115ca918c05f7" - integrity 
sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ== - dependencies: - commondir "^1.0.1" - make-dir "^2.0.0" - pkg-dir "^3.0.0" - -find-up@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" - integrity sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8= - dependencies: - path-exists "^2.0.0" - pinkie-promise "^2.0.0" - -find-up@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" - integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c= - dependencies: - locate-path "^2.0.0" - -find-up@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" - integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== - dependencies: - locate-path "^3.0.0" - -flat-cache@^1.2.1: - version "1.3.4" - resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-1.3.4.tgz#2c2ef77525cc2929007dfffa1dd314aa9c9dee6f" - integrity sha512-VwyB3Lkgacfik2vhqR4uv2rvebqmDvFu4jlN/C1RzWoJEo8I7z4Q404oiqYCkq41mni8EzQnm95emU9seckwtg== - dependencies: - circular-json "^0.3.1" - graceful-fs "^4.1.2" - rimraf "~2.6.2" - write "^0.2.1" - -flush-write-stream@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8" - integrity sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w== - dependencies: - inherits "^2.0.3" - readable-stream "^2.3.6" - -follow-redirects@^1.0.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.7.0.tgz#489ebc198dc0e7f64167bd23b03c4c19b5784c76" - integrity sha512-m/pZQy4Gj287eNy94nivy5wchN3Kp+Q5WgUPNy5lJSZ3sgkVKSYV/ZChMAQVIgx1SqfZ2zBZtPA2YlXIWxxJOQ== - dependencies: - debug "^3.2.6" - -for-in@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" - integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= - -forever-agent@~0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" - integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= - -form-data@~2.3.2: - version "2.3.3" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" - integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.6" - mime-types "^2.1.12" - -forwarded@~0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84" - integrity sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ= - -fragment-cache@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" - integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= - dependencies: - map-cache "^0.2.2" - -fresh@0.5.2: - version "0.5.2" - resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" - integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= - -from2@^2.1.0, from2@^2.1.1: - version "2.3.0" - resolved "https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af" - integrity 
sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8= - dependencies: - inherits "^2.0.1" - readable-stream "^2.0.0" - -fs-extra@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9" - integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== - dependencies: - graceful-fs "^4.1.2" - jsonfile "^4.0.0" - universalify "^0.1.0" - -fs-minipass@^1.2.5: - version "1.2.6" - resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.6.tgz#2c5cc30ded81282bfe8a0d7c7c1853ddeb102c07" - integrity sha512-crhvyXcMejjv3Z5d2Fa9sf5xLYVCF5O1c71QxbVnbLsmYMBEvDAftewesN/HhY03YRoA7zOMxjNGrF5svGaaeQ== - dependencies: - minipass "^2.2.1" - -fs-write-stream-atomic@^1.0.8: - version "1.0.10" - resolved "https://registry.yarnpkg.com/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz#b47df53493ef911df75731e70a9ded0189db40c9" - integrity sha1-tH31NJPvkR33VzHnCp3tAYnbQMk= - dependencies: - graceful-fs "^4.1.2" - iferr "^0.1.5" - imurmurhash "^0.1.4" - readable-stream "1 || 2" - -fs.realpath@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= - -fsevents@^1.2.7: - version "1.2.9" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.9.tgz#3f5ed66583ccd6f400b5a00db6f7e861363e388f" - integrity sha512-oeyj2H3EjjonWcFjD5NvZNE9Rqe4UW+nQBU2HNeKw0koVLEFIhtyETyAakeAM3de7Z/SW5kcA+fZUait9EApnw== - dependencies: - nan "^2.12.1" - node-pre-gyp "^0.12.0" - -function-bind@^1.0.2, function-bind@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" - integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== - -functional-red-black-tree@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" - integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= - -gauge@~2.7.3: - version "2.7.4" - resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" - integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c= - dependencies: - aproba "^1.0.3" - console-control-strings "^1.0.0" - has-unicode "^2.0.0" - object-assign "^4.1.0" - signal-exit "^3.0.0" - string-width "^1.0.1" - strip-ansi "^3.0.1" - wide-align "^1.1.0" - -get-caller-file@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a" - integrity sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w== - -get-caller-file@^2.0.1: - version "2.0.5" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" - integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== - -get-stream@3.0.0, get-stream@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14" - integrity sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ= - -get-stream@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" - integrity 
sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== - dependencies: - pump "^3.0.0" - -get-value@^2.0.3, get-value@^2.0.6: - version "2.0.6" - resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" - integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= - -getpass@^0.1.1: - version "0.1.7" - resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" - integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= - dependencies: - assert-plus "^1.0.0" - -glob-parent@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" - integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= - dependencies: - is-glob "^3.1.0" - path-dirname "^1.0.0" - -glob-to-regexp@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.3.0.tgz#8c5a1494d2066c570cc3bfe4496175acc4d502ab" - integrity sha1-jFoUlNIGbFcMw7/kSWF1rMTVAqs= - -glob@^7.0.3, glob@^7.1.2, glob@^7.1.3: - version "7.1.4" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.4.tgz#aa608a2f6c577ad357e1ae5a5c26d9a8d1969255" - integrity sha512-hkLPepehmnKk41pUGm3sYxoFs/umurYfYJCerbXEyFIWcAzvpipAgVkBqqT9RBKMGjnq6kMuyYwha6csxbiM1A== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.0.4" - once "^1.3.0" - path-is-absolute "^1.0.0" - -globals@^11.0.1, globals@^11.1.0: - version "11.12.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" - integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== - -globby@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" - integrity sha1-9abXDoOV4hyFj7BInWTfAkJNUGw= - dependencies: - array-union "^1.0.1" - glob "^7.0.3" - object-assign "^4.0.1" - pify "^2.0.0" - pinkie-promise "^2.0.0" - -globby@^7.1.1: - version "7.1.1" - resolved "https://registry.yarnpkg.com/globby/-/globby-7.1.1.tgz#fb2ccff9401f8600945dfada97440cca972b8680" - integrity sha1-+yzP+UAfhgCUXfral0QMypcrhoA= - dependencies: - array-union "^1.0.1" - dir-glob "^2.0.0" - glob "^7.1.2" - ignore "^3.3.5" - pify "^3.0.0" - slash "^1.0.0" - -globby@^9.2.0: - version "9.2.0" - resolved "https://registry.yarnpkg.com/globby/-/globby-9.2.0.tgz#fd029a706c703d29bdd170f4b6db3a3f7a7cb63d" - integrity sha512-ollPHROa5mcxDEkwg6bPt3QbEf4pDQSNtd6JPL1YvOvAo/7/0VAm9TccUeoTmarjPw4pfUthSCqcyfNB1I3ZSg== - dependencies: - "@types/glob" "^7.1.1" - array-union "^1.0.2" - dir-glob "^2.2.2" - fast-glob "^2.2.6" - glob "^7.1.3" - ignore "^4.0.3" - pify "^4.0.1" - slash "^2.0.0" - -good-listener@^1.2.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/good-listener/-/good-listener-1.2.2.tgz#d53b30cdf9313dffb7dc9a0d477096aa6d145c50" - integrity sha1-1TswzfkxPf+33JoNR3CWqm0UXFA= - dependencies: - delegate "^3.1.2" - -got@^8.0.3: - version "8.3.2" - resolved "https://registry.yarnpkg.com/got/-/got-8.3.2.tgz#1d23f64390e97f776cac52e5b936e5f514d2e937" - integrity sha512-qjUJ5U/hawxosMryILofZCkm3C84PLJS/0grRIpjAwu+Lkxxj5cxeCU25BG0/3mDSpXKTyZr8oh8wIgLaH0QCw== - dependencies: - "@sindresorhus/is" "^0.7.0" - cacheable-request "^2.1.1" - decompress-response "^3.3.0" - duplexer3 "^0.1.4" - get-stream "^3.0.0" - into-stream "^3.1.0" - is-retry-allowed "^1.1.0" - isurl "^1.0.0-alpha5" - lowercase-keys "^1.0.0" - 
mimic-response "^1.0.0" - p-cancelable "^0.4.0" - p-timeout "^2.0.1" - pify "^3.0.0" - safe-buffer "^5.1.1" - timed-out "^4.0.1" - url-parse-lax "^3.0.0" - url-to-options "^1.0.1" - -graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6: - version "4.1.15" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.15.tgz#ffb703e1066e8a0eeaa4c8b80ba9253eeefbfb00" - integrity sha512-6uHUhOPEBgQ24HM+r6b/QwWfZq+yiFcipKFrOFiBEnWdy5sdzYoi+pJeQaPI5qOLRFqWmAXUPQNsielzdLoecA== - -gzip-size@^5.0.0: - version "5.1.1" - resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-5.1.1.tgz#cb9bee692f87c0612b232840a873904e4c135274" - integrity sha512-FNHi6mmoHvs1mxZAds4PpdCS6QG8B4C1krxJsMutgxl5t3+GlRTzzI3NEkifXx2pVsOvJdOGSmIgDhQ55FwdPA== - dependencies: - duplexer "^0.1.1" - pify "^4.0.1" - -handle-thing@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.0.tgz#0e039695ff50c93fc288557d696f3c1dc6776754" - integrity sha512-d4sze1JNC454Wdo2fkuyzCr6aHcbL6PGGuFAz0Li/NcOm1tCHGnWDRmJP85dh9IhQErTc2svWFEX5xHIOo//kQ== - -har-schema@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" - integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= - -har-validator@~5.1.0: - version "5.1.3" - resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.3.tgz#1ef89ebd3e4996557675eed9893110dc350fa080" - integrity sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g== - dependencies: - ajv "^6.5.5" - har-schema "^2.0.0" - -has-ansi@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" - integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= - dependencies: - ansi-regex "^2.0.0" - -has-flag@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" - integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= - -has-symbol-support-x@^1.4.1: - version "1.4.2" - resolved "https://registry.yarnpkg.com/has-symbol-support-x/-/has-symbol-support-x-1.4.2.tgz#1409f98bc00247da45da67cee0a36f282ff26455" - integrity sha512-3ToOva++HaW+eCpgqZrCfN51IPB+7bJNVT6CUATzueB5Heb8o6Nam0V3HG5dlDvZU1Gn5QLcbahiKw/XVk5JJw== - -has-symbols@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.0.tgz#ba1a8f1af2a0fc39650f5c850367704122063b44" - integrity sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q= - -has-to-string-tag-x@^1.2.0: - version "1.4.1" - resolved "https://registry.yarnpkg.com/has-to-string-tag-x/-/has-to-string-tag-x-1.4.1.tgz#a045ab383d7b4b2012a00148ab0aa5f290044d4d" - integrity sha512-vdbKfmw+3LoOYVr+mtxHaX5a96+0f3DljYd8JOqvOLsf5mw2Otda2qCDT9qRqLAhrjyQ0h7ual5nOiASpsGNFw== - dependencies: - has-symbol-support-x "^1.4.1" - -has-unicode@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" - integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk= - -has-value@^0.3.1: - version "0.3.1" - resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" - integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= - dependencies: - get-value "^2.0.3" - has-values "^0.1.4" - isobject "^2.0.0" - -has-value@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" - integrity 
sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= - dependencies: - get-value "^2.0.6" - has-values "^1.0.0" - isobject "^3.0.0" - -has-values@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" - integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= - -has-values@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" - integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= - dependencies: - is-number "^3.0.0" - kind-of "^4.0.0" - -has@^1.0.0, has@^1.0.1, has@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" - integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== - dependencies: - function-bind "^1.1.1" - -hash-base@^3.0.0: - version "3.0.4" - resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.0.4.tgz#5fc8686847ecd73499403319a6b0a3f3f6ae4918" - integrity sha1-X8hoaEfs1zSZQDMZprCj8/auSRg= - dependencies: - inherits "^2.0.1" - safe-buffer "^5.0.1" - -hash-sum@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/hash-sum/-/hash-sum-1.0.2.tgz#33b40777754c6432573c120cc3808bbd10d47f04" - integrity sha1-M7QHd3VMZDJXPBIMw4CLvRDUfwQ= - -hash.js@^1.0.0, hash.js@^1.0.3: - version "1.1.7" - resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" - integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== - dependencies: - inherits "^2.0.3" - minimalistic-assert "^1.0.1" - -he@1.2.x, he@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" - integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== - -hex-color-regex@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/hex-color-regex/-/hex-color-regex-1.1.0.tgz#4c06fccb4602fe2602b3c93df82d7e7dbf1a8a8e" - integrity sha512-l9sfDFsuqtOqKDsQdqrMRk0U85RZc0RtOR9yPI7mRVOa4FsR/BVnZ0shmQRM96Ji99kYZP/7hn1cedc1+ApsTQ== - -highlight.js@^9.6.0: - version "9.15.6" - resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-9.15.6.tgz#72d4d8d779ec066af9a17cb14360c3def0aa57c4" - integrity sha512-zozTAWM1D6sozHo8kqhfYgsac+B+q0PmsjXeyDrYIHHcBN0zTVT66+s2GW1GZv7DbyaROdLXKdabwS/WqPyIdQ== - -hmac-drbg@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" - integrity sha1-0nRXAQJabHdabFRXk+1QL8DGSaE= - dependencies: - hash.js "^1.0.3" - minimalistic-assert "^1.0.0" - minimalistic-crypto-utils "^1.0.1" - -hoek@6.x.x: - version "6.1.3" - resolved "https://registry.yarnpkg.com/hoek/-/hoek-6.1.3.tgz#73b7d33952e01fe27a38b0457294b79dd8da242c" - integrity sha512-YXXAAhmF9zpQbC7LEcREFtXfGq5K1fmd+4PHkBq8NUqmzW3G+Dq10bI/i0KucLRwss3YYFQ0fSfoxBZYiGUqtQ== - -hoopy@^0.1.2: - version "0.1.4" - resolved "https://registry.yarnpkg.com/hoopy/-/hoopy-0.1.4.tgz#609207d661100033a9a9402ad3dea677381c1b1d" - integrity sha512-HRcs+2mr52W0K+x8RzcLzuPPmVIKMSv97RGHy0Ea9y/mpcaK+xTrjICA04KAHi4GRzxliNqNJEFYWHghy3rSfQ== - -hosted-git-info@^2.1.4: - version "2.7.1" - resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.7.1.tgz#97f236977bd6e125408930ff6de3eec6281ec047" - integrity 
sha512-7T/BxH19zbcCTa8XkMlbK5lTo1WtgkFi3GvdWEyNuc4Vex7/9Dqbnpsf4JMydcfj9HCg4zUWFTL3Za6lapg5/w== - -hpack.js@^2.1.6: - version "2.1.6" - resolved "https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" - integrity sha1-h3dMCUnlE/QuhFdbPEVoH63ioLI= - dependencies: - inherits "^2.0.1" - obuf "^1.0.0" - readable-stream "^2.0.1" - wbuf "^1.1.0" - -hsl-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/hsl-regex/-/hsl-regex-1.0.0.tgz#d49330c789ed819e276a4c0d272dffa30b18fe6e" - integrity sha1-1JMwx4ntgZ4nakwNJy3/owsY/m4= - -hsla-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/hsla-regex/-/hsla-regex-1.0.0.tgz#c1ce7a3168c8c6614033a4b5f7877f3b225f9c38" - integrity sha1-wc56MWjIxmFAM6S194d/OyJfnDg= - -html-comment-regex@^1.1.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/html-comment-regex/-/html-comment-regex-1.1.2.tgz#97d4688aeb5c81886a364faa0cad1dda14d433a7" - integrity sha512-P+M65QY2JQ5Y0G9KKdlDpo0zK+/OHptU5AaBwUfAIDJZk1MYf32Frm84EcOytfJE0t5JvkAnKlmjsXDnWzCJmQ== - -html-entities@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-1.2.1.tgz#0df29351f0721163515dfb9e5543e5f6eed5162f" - integrity sha1-DfKTUfByEWNRXfueVUPl9u7VFi8= - -html-minifier@^3.2.3: - version "3.5.21" - resolved "https://registry.yarnpkg.com/html-minifier/-/html-minifier-3.5.21.tgz#d0040e054730e354db008463593194015212d20c" - integrity sha512-LKUKwuJDhxNa3uf/LPR/KVjm/l3rBqtYeCOAekvG8F1vItxMUpueGd94i/asDDr8/1u7InxzFA5EeGjhhG5mMA== - dependencies: - camel-case "3.0.x" - clean-css "4.2.x" - commander "2.17.x" - he "1.2.x" - param-case "2.1.x" - relateurl "0.2.x" - uglify-js "3.4.x" - -html-tags@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/html-tags/-/html-tags-2.0.0.tgz#10b30a386085f43cede353cc8fa7cb0deeea668b" - integrity sha1-ELMKOGCF9Dzt41PMj6fLDe7qZos= - -html-webpack-plugin@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-3.2.0.tgz#b01abbd723acaaa7b37b6af4492ebda03d9dd37b" - integrity sha1-sBq71yOsqqeze2r0SS69oD2d03s= - dependencies: - html-minifier "^3.2.3" - loader-utils "^0.2.16" - lodash "^4.17.3" - pretty-error "^2.0.2" - tapable "^1.0.0" - toposort "^1.0.0" - util.promisify "1.0.0" - -htmlparser2@^3.3.0: - version "3.10.1" - resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-3.10.1.tgz#bd679dc3f59897b6a34bb10749c855bb53a9392f" - integrity sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ== - dependencies: - domelementtype "^1.3.1" - domhandler "^2.3.0" - domutils "^1.5.1" - entities "^1.1.1" - inherits "^2.0.1" - readable-stream "^3.1.1" - -http-cache-semantics@3.8.1: - version "3.8.1" - resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz#39b0e16add9b605bf0a9ef3d9daaf4843b4cacd2" - integrity sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w== - -http-deceiver@^1.2.7: - version "1.2.7" - resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" - integrity sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc= - -http-errors@1.7.2, http-errors@~1.7.2: - version "1.7.2" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.2.tgz#4f5029cf13239f31036e5b2e55292bcfbcc85c8f" - integrity sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg== - dependencies: - 
depd "~1.1.2" - inherits "2.0.3" - setprototypeof "1.1.1" - statuses ">= 1.5.0 < 2" - toidentifier "1.0.0" - -http-errors@~1.6.2: - version "1.6.3" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" - integrity sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0= - dependencies: - depd "~1.1.2" - inherits "2.0.3" - setprototypeof "1.1.0" - statuses ">= 1.4.0 < 2" - -http-parser-js@>=0.4.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.5.0.tgz#d65edbede84349d0dc30320815a15d39cc3cbbd8" - integrity sha512-cZdEF7r4gfRIq7ezX9J0T+kQmJNOub71dWbgAXVHDct80TKP4MCETtZQ31xyv38UwgzkWPYF/Xc0ge55dW9Z9w== - -http-proxy-middleware@^0.19.1: - version "0.19.1" - resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz#183c7dc4aa1479150306498c210cdaf96080a43a" - integrity sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q== - dependencies: - http-proxy "^1.17.0" - is-glob "^4.0.0" - lodash "^4.17.11" - micromatch "^3.1.10" - -http-proxy@^1.17.0: - version "1.17.0" - resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.17.0.tgz#7ad38494658f84605e2f6db4436df410f4e5be9a" - integrity sha512-Taqn+3nNvYRfJ3bGvKfBSRwy1v6eePlm3oc/aWVxZp57DQr5Eq3xhKJi7Z4hZpS8PC3H4qI+Yly5EmFacGuA/g== - dependencies: - eventemitter3 "^3.0.0" - follow-redirects "^1.0.0" - requires-port "^1.0.0" - -http-signature@~1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" - integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= - dependencies: - assert-plus "^1.0.0" - jsprim "^1.2.2" - sshpk "^1.7.0" - -https-browserify@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" - integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= - -iconv-lite@0.4.24, iconv-lite@^0.4.17, iconv-lite@^0.4.4: - version "0.4.24" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" - integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== - dependencies: - safer-buffer ">= 2.1.2 < 3" - -icss-replace-symbols@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz#06ea6f83679a7749e386cfe1fe812ae5db223ded" - integrity sha1-Bupvg2ead0njhs/h/oEq5dsiPe0= - -icss-utils@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-2.1.0.tgz#83f0a0ec378bf3246178b6c2ad9136f135b1c962" - integrity sha1-g/Cg7DeL8yRheLbCrZE28TWxyWI= - dependencies: - postcss "^6.0.1" - -ieee754@^1.1.4: - version "1.1.13" - resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84" - integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg== - -iferr@^0.1.5: - version "0.1.5" - resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501" - integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE= - -ignore-walk@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.1.tgz#a83e62e7d272ac0e3b551aaa82831a19b69f82f8" - integrity sha512-DTVlMx3IYPe0/JJcYP7Gxg7ttZZu3IInhuEhbchuqneY9wWe5Ojy2mXLBaQFUQmo0AW2r3qG7m1mg86js+gnlQ== - dependencies: - minimatch "^3.0.4" - 
-ignore@^3.3.3, ignore@^3.3.5: - version "3.3.10" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.10.tgz#0a97fb876986e8081c631160f8f9f389157f0043" - integrity sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug== - -ignore@^4.0.3: - version "4.0.6" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" - integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== - -import-cwd@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/import-cwd/-/import-cwd-2.1.0.tgz#aa6cf36e722761285cb371ec6519f53e2435b0a9" - integrity sha1-qmzzbnInYShcs3HsZRn1PiQ1sKk= - dependencies: - import-from "^2.1.0" - -import-fresh@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-2.0.0.tgz#d81355c15612d386c61f9ddd3922d4304822a546" - integrity sha1-2BNVwVYS04bGH53dOSLUMEgipUY= - dependencies: - caller-path "^2.0.0" - resolve-from "^3.0.0" - -import-from@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/import-from/-/import-from-2.1.0.tgz#335db7f2a7affd53aaa471d4b8021dee36b7f3b1" - integrity sha1-M1238qev/VOqpHHUuAId7ja387E= - dependencies: - resolve-from "^3.0.0" - -import-local@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/import-local/-/import-local-2.0.0.tgz#55070be38a5993cf18ef6db7e961f5bee5c5a09d" - integrity sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ== - dependencies: - pkg-dir "^3.0.0" - resolve-cwd "^2.0.0" - -imurmurhash@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" - integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= - -indexes-of@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" - integrity sha1-8w9xbI4r00bHtn0985FVZqfAVgc= - -indexof@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/indexof/-/indexof-0.0.1.tgz#82dc336d232b9062179d05ab3293a66059fd435d" - integrity sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10= - -inflight@^1.0.4: - version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= - dependencies: - once "^1.3.0" - wrappy "1" - -inherits@2, inherits@2.0.3, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.1, inherits@~2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" - integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= - -inherits@2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1" - integrity sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE= - -ini@~1.3.0: - version "1.3.5" - resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927" - integrity sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw== - -inquirer@^3.0.6: - version "3.3.0" - resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-3.3.0.tgz#9dd2f2ad765dcab1ff0443b491442a20ba227dc9" - integrity sha512-h+xtnyk4EwKvFWHrUYsWErEVR+igKtLdchu+o0Z1RL7VU/jVMFbYir2bp6bAj8efFNxWqHX0dIss6fJQ+/+qeQ== - dependencies: - ansi-escapes "^3.0.0" - chalk "^2.0.0" - cli-cursor "^2.1.0" - cli-width "^2.0.0" - external-editor 
"^2.0.4" - figures "^2.0.0" - lodash "^4.3.0" - mute-stream "0.0.7" - run-async "^2.2.0" - rx-lite "^4.0.8" - rx-lite-aggregates "^4.0.8" - string-width "^2.1.0" - strip-ansi "^4.0.0" - through "^2.3.6" - -internal-ip@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/internal-ip/-/internal-ip-4.3.0.tgz#845452baad9d2ca3b69c635a137acb9a0dad0907" - integrity sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg== - dependencies: - default-gateway "^4.2.0" - ipaddr.js "^1.9.0" - -into-stream@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/into-stream/-/into-stream-3.1.0.tgz#96fb0a936c12babd6ff1752a17d05616abd094c6" - integrity sha1-lvsKk2wSur1v8XUqF9BWFqvQlMY= - dependencies: - from2 "^2.1.1" - p-is-promise "^1.1.0" - -invariant@^2.2.2: - version "2.2.4" - resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" - integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== - dependencies: - loose-envify "^1.0.0" - -invert-kv@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-2.0.0.tgz#7393f5afa59ec9ff5f67a27620d11c226e3eec02" - integrity sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA== - -ip-regex@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9" - integrity sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk= - -ip@^1.1.0, ip@^1.1.5: - version "1.1.5" - resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" - integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo= - -ipaddr.js@1.9.0, ipaddr.js@^1.9.0: - version "1.9.0" - resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.0.tgz#37df74e430a0e47550fe54a2defe30d8acd95f65" - integrity sha512-M4Sjn6N/+O6/IXSJseKqHoFc+5FdGJ22sXqnjTpdZweHK64MzEPAyQZyEU3R/KRv2GLoa7nNtg/C2Ev6m7z+eA== - -is-absolute-url@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-2.1.0.tgz#50530dfb84fcc9aa7dbe7852e83a37b93b9f2aa6" - integrity sha1-UFMN+4T8yap9vnhS6Do3uTufKqY= - -is-accessor-descriptor@^0.1.6: - version "0.1.6" - resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" - integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= - dependencies: - kind-of "^3.0.2" - -is-accessor-descriptor@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" - integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== - dependencies: - kind-of "^6.0.0" - -is-arrayish@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" - integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= - -is-arrayish@^0.3.1: - version "0.3.2" - resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.3.2.tgz#4574a2ae56f7ab206896fb431eaeed066fdf8f03" - integrity sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ== - -is-binary-path@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" - integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= - 
dependencies: - binary-extensions "^1.0.0" - -is-buffer@^1.1.5: - version "1.1.6" - resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" - integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== - -is-callable@^1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.4.tgz#1e1adf219e1eeb684d691f9d6a05ff0d30a24d75" - integrity sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA== - -is-ci@^1.0.10: - version "1.2.1" - resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-1.2.1.tgz#e3779c8ee17fccf428488f6e281187f2e632841c" - integrity sha512-s6tfsaQaQi3JNciBH6shVqEDvhGut0SUXr31ag8Pd8BBbVVlcGfWhpPmEOoM6RJ5TFhbypvf5yyRw/VXW1IiWg== - dependencies: - ci-info "^1.5.0" - -is-color-stop@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-color-stop/-/is-color-stop-1.1.0.tgz#cfff471aee4dd5c9e158598fbe12967b5cdad345" - integrity sha1-z/9HGu5N1cnhWFmPvhKWe1za00U= - dependencies: - css-color-names "^0.0.4" - hex-color-regex "^1.1.0" - hsl-regex "^1.0.0" - hsla-regex "^1.0.0" - rgb-regex "^1.0.1" - rgba-regex "^1.0.0" - -is-data-descriptor@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" - integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= - dependencies: - kind-of "^3.0.2" - -is-data-descriptor@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" - integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== - dependencies: - kind-of "^6.0.0" - -is-date-object@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16" - integrity sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY= - -is-descriptor@^0.1.0: - version "0.1.6" - resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" - integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== - dependencies: - is-accessor-descriptor "^0.1.6" - is-data-descriptor "^0.1.4" - kind-of "^5.0.0" - -is-descriptor@^1.0.0, is-descriptor@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" - integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== - dependencies: - is-accessor-descriptor "^1.0.0" - is-data-descriptor "^1.0.0" - kind-of "^6.0.2" - -is-directory@^0.3.1: - version "0.3.1" - resolved "https://registry.yarnpkg.com/is-directory/-/is-directory-0.3.1.tgz#61339b6f2475fc772fd9c9d83f5c8575dc154ae1" - integrity sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE= - -is-extendable@^0.1.0, is-extendable@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" - integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= - -is-extendable@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" - integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== - dependencies: - is-plain-object 
"^2.0.4" - -is-extglob@^2.1.0, is-extglob@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" - integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= - -is-fullwidth-code-point@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" - integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= - dependencies: - number-is-nan "^1.0.0" - -is-fullwidth-code-point@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" - integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= - -is-glob@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" - integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= - dependencies: - is-extglob "^2.1.0" - -is-glob@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" - integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== - dependencies: - is-extglob "^2.1.1" - -is-number@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" - integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= - dependencies: - kind-of "^3.0.2" - -is-obj@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" - integrity sha1-PkcprB9f3gJc19g6iW2rn09n2w8= - -is-object@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-object/-/is-object-1.0.1.tgz#8952688c5ec2ffd6b03ecc85e769e02903083470" - integrity sha1-iVJojF7C/9awPsyF52ngKQMINHA= - -is-path-cwd@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-2.1.0.tgz#2e0c7e463ff5b7a0eb60852d851a6809347a124c" - integrity sha512-Sc5j3/YnM8tDeyCsVeKlm/0p95075DyLmDEIkSgQ7mXkrOX+uTCtmQFm0CYzVyJwcCCmO3k8qfJt17SxQwB5Zw== - -is-path-in-cwd@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz#bfe2dca26c69f397265a4009963602935a053acb" - integrity sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ== - dependencies: - is-path-inside "^2.1.0" - -is-path-inside@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-2.1.0.tgz#7c9810587d659a40d27bcdb4d5616eab059494b2" - integrity sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg== - dependencies: - path-is-inside "^1.0.2" - -is-plain-obj@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" - integrity sha1-caUMhCnfync8kqOQpKA7OfzVHT4= - -is-plain-object@^2.0.1, is-plain-object@^2.0.3, is-plain-object@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" - integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== - dependencies: - isobject "^3.0.1" - -is-promise@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa" - 
integrity sha1-eaKp7OfwlugPNtKy87wWwf9L8/o= - -is-regex@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491" - integrity sha1-VRdIm1RwkbCTDglWVM7SXul+lJE= - dependencies: - has "^1.0.1" - -is-resolvable@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-resolvable/-/is-resolvable-1.1.0.tgz#fb18f87ce1feb925169c9a407c19318a3206ed88" - integrity sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg== - -is-retry-allowed@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.1.0.tgz#11a060568b67339444033d0125a61a20d564fb34" - integrity sha1-EaBgVotnM5REAz0BJaYaINVk+zQ= - -is-stream@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" - integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= - -is-svg@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-svg/-/is-svg-3.0.0.tgz#9321dbd29c212e5ca99c4fa9794c714bcafa2f75" - integrity sha512-gi4iHK53LR2ujhLVVj+37Ykh9GLqYHX6JOVXbLAucaG/Cqw9xwdFOjDM2qeifLs1sF1npXXFvDu0r5HNgCMrzQ== - dependencies: - html-comment-regex "^1.1.0" - -is-symbol@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.2.tgz#a055f6ae57192caee329e7a860118b497a950f38" - integrity sha512-HS8bZ9ox60yCJLH9snBpIwv9pYUAkcuLhSA1oero1UB5y9aiQpRA8y2ex945AOtCZL1lJDeIk3G5LthswI46Lw== - dependencies: - has-symbols "^1.0.0" - -is-typedarray@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" - integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= - -is-windows@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" - integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== - -is-wsl@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" - integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= - -isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" - integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= - -isemail@3.x.x: - version "3.2.0" - resolved "https://registry.yarnpkg.com/isemail/-/isemail-3.2.0.tgz#59310a021931a9fb06bbb51e155ce0b3f236832c" - integrity sha512-zKqkK+O+dGqevc93KNsbZ/TqTUFd46MwWjYOoMrjIMZ51eU7DtQG3Wmd9SQQT7i7RVnuTPEiYEWHU3MSbxC1Tg== - dependencies: - punycode "2.x.x" - -isexe@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" - integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= - -isobject@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" - integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= - dependencies: - isarray "1.0.0" - -isobject@^3.0.0, isobject@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" - integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= - -isstream@~0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" - integrity 
sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= - -isurl@^1.0.0-alpha5: - version "1.0.0" - resolved "https://registry.yarnpkg.com/isurl/-/isurl-1.0.0.tgz#b27f4f49f3cdaa3ea44a0a5b7f3462e6edc39d67" - integrity sha512-1P/yWsxPlDtn7QeRD+ULKQPaIaN6yF368GZ2vDfv0AL0NwpStafjWCDDdn0k8wgFMWpVAqG7oJhxHnlud42i9w== - dependencies: - has-to-string-tag-x "^1.2.0" - is-object "^1.0.1" - -javascript-stringify@^1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/javascript-stringify/-/javascript-stringify-1.6.0.tgz#142d111f3a6e3dae8f4a9afd77d45855b5a9cce3" - integrity sha1-FC0RHzpuPa6PSpr9d9RYVbWpzOM= - -joi@^14.3.0: - version "14.3.1" - resolved "https://registry.yarnpkg.com/joi/-/joi-14.3.1.tgz#164a262ec0b855466e0c35eea2a885ae8b6c703c" - integrity sha512-LQDdM+pkOrpAn4Lp+neNIFV3axv1Vna3j38bisbQhETPMANYRbFJFUyOZcOClYvM/hppMhGWuKSFEK9vjrB+bQ== - dependencies: - hoek "6.x.x" - isemail "3.x.x" - topo "3.x.x" - -js-levenshtein@^1.1.3: - version "1.1.6" - resolved "https://registry.yarnpkg.com/js-levenshtein/-/js-levenshtein-1.1.6.tgz#c6cee58eb3550372df8deb85fad5ce66ce01d59d" - integrity sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g== - -js-message@1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/js-message/-/js-message-1.0.5.tgz#2300d24b1af08e89dd095bc1a4c9c9cfcb892d15" - integrity sha1-IwDSSxrwjondCVvBpMnJz8uJLRU= - -js-queue@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/js-queue/-/js-queue-2.0.0.tgz#362213cf860f468f0125fc6c96abc1742531f948" - integrity sha1-NiITz4YPRo8BJfxslqvBdCUx+Ug= - dependencies: - easy-stack "^1.0.0" - -"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" - integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== - -js-tokens@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" - integrity sha1-mGbfOVECEw449/mWvOtlRDIJwls= - -js-yaml@^3.13.1, js-yaml@^3.9.0, js-yaml@^3.9.1: - version "3.13.1" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847" - integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw== - dependencies: - argparse "^1.0.7" - esprima "^4.0.0" - -jsbn@~0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" - integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= - -jsesc@^2.5.1: - version "2.5.2" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" - integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== - -jsesc@~0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" - integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= - -json-buffer@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898" - integrity sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg= - -json-parse-better-errors@^1.0.1, json-parse-better-errors@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" - integrity 
sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== - -json-schema-traverse@^0.3.0: - version "0.3.1" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340" - integrity sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A= - -json-schema-traverse@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" - integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== - -json-schema@0.2.3: - version "0.2.3" - resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" - integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= - -json-stable-stringify-without-jsonify@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" - integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE= - -json-stringify-safe@~5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" - integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= - -json2yaml@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/json2yaml/-/json2yaml-1.1.0.tgz#5414d907f9816586b80c513ec2e3aeb2ab819a6c" - integrity sha1-VBTZB/mBZYa4DFE+wuOusquBmmw= - dependencies: - remedial "1.x" - -json3@^3.3.2: - version "3.3.2" - resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.2.tgz#3c0434743df93e2f5c42aee7b19bcb483575f4e1" - integrity sha1-PAQ0dD35Pi9cQq7nsZvLSDV19OE= - -json5@^0.5.0, json5@^0.5.1: - version "0.5.1" - resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" - integrity sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE= - -json5@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" - integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== - dependencies: - minimist "^1.2.0" - -json5@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.0.tgz#e7a0c62c48285c628d20a10b85c89bb807c32850" - integrity sha512-8Mh9h6xViijj36g7Dxi+Y4S6hNGV96vcJZr/SrlHh1LR/pEn/8j/+qIBbs44YKl69Lrfctp4QD+AdWLTMqEZAQ== - dependencies: - minimist "^1.2.0" - -jsonfile@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" - integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= - optionalDependencies: - graceful-fs "^4.1.6" - -jsonify@~0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73" - integrity sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM= - -jsprim@^1.2.2: - version "1.4.1" - resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" - integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= - dependencies: - assert-plus "1.0.0" - extsprintf "1.3.0" - json-schema "0.2.3" - verror "1.10.0" - -keyv@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.0.0.tgz#44923ba39e68b12a7cec7df6c3268c031f2ef373" - integrity sha512-eguHnq22OE3uVoSYG0LVWNP+4ppamWr9+zWBe1bsNcovIMy6huUJFPgy4mGwCd/rnl3vOLGW1MTlu4c57CT1xA== - dependencies: - json-buffer "3.0.0" - 
-killable@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/killable/-/killable-1.0.1.tgz#4c8ce441187a061c7474fb87ca08e2a638194892" - integrity sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg== - -kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: - version "3.2.2" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" - integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= - dependencies: - is-buffer "^1.1.5" - -kind-of@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" - integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= - dependencies: - is-buffer "^1.1.5" - -kind-of@^5.0.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" - integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== - -kind-of@^6.0.0, kind-of@^6.0.2: - version "6.0.2" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051" - integrity sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA== - -launch-editor-middleware@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/launch-editor-middleware/-/launch-editor-middleware-2.2.1.tgz#e14b07e6c7154b0a4b86a0fd345784e45804c157" - integrity sha512-s0UO2/gEGiCgei3/2UN3SMuUj1phjQN8lcpnvgLSz26fAzNWPQ6Nf/kF5IFClnfU2ehp6LrmKdMU/beveO+2jg== - dependencies: - launch-editor "^2.2.1" - -launch-editor@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/launch-editor/-/launch-editor-2.2.1.tgz#871b5a3ee39d6680fcc26d37930b6eeda89db0ca" - integrity sha512-On+V7K2uZK6wK7x691ycSUbLD/FyKKelArkbaAMSSJU8JmqmhwN2+mnJDNINuJWSrh2L0kDk+ZQtbC/gOWUwLw== - dependencies: - chalk "^2.3.0" - shell-quote "^1.6.1" - -lcid@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/lcid/-/lcid-2.0.0.tgz#6ef5d2df60e52f82eb228a4c373e8d1f397253cf" - integrity sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA== - dependencies: - invert-kv "^2.0.0" - -levn@^0.3.0, levn@~0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" - integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= - dependencies: - prelude-ls "~1.1.2" - type-check "~0.3.2" - -loader-fs-cache@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/loader-fs-cache/-/loader-fs-cache-1.0.2.tgz#54cedf6b727e1779fd8f01205f05f6e88706f086" - integrity sha512-70IzT/0/L+M20jUlEqZhZyArTU6VKLRTYRDAYN26g4jfzpJqjipLL3/hgYpySqI9PwsVRHHFja0LfEmsx9X2Cw== - dependencies: - find-cache-dir "^0.1.1" - mkdirp "0.5.1" - -loader-runner@^2.3.0, loader-runner@^2.3.1: - version "2.4.0" - resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.4.0.tgz#ed47066bfe534d7e84c4c7b9998c2a75607d9357" - integrity sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw== - -loader-utils@^0.2.16: - version "0.2.17" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-0.2.17.tgz#f86e6374d43205a6e6c60e9196f17c0299bfb348" - integrity sha1-+G5jdNQyBabmxg6RlvF8Apm/s0g= - dependencies: - big.js "^3.1.3" - emojis-list "^2.0.0" - json5 "^0.5.0" - object-assign "^4.0.1" - -loader-utils@^1.0.2, loader-utils@^1.1.0: - version "1.2.3" - resolved 
"https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.2.3.tgz#1ff5dc6911c9f0a062531a4c04b609406108c2c7" - integrity sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA== - dependencies: - big.js "^5.2.2" - emojis-list "^2.0.0" - json5 "^1.0.1" - -locate-path@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" - integrity sha1-K1aLJl7slExtnA3pw9u7ygNUzY4= - dependencies: - p-locate "^2.0.0" - path-exists "^3.0.0" - -locate-path@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" - integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== - dependencies: - p-locate "^3.0.0" - path-exists "^3.0.0" - -lodash.defaultsdeep@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/lodash.defaultsdeep/-/lodash.defaultsdeep-4.6.0.tgz#bec1024f85b1bd96cbea405b23c14ad6443a6f81" - integrity sha1-vsECT4WxvZbL6kBbI8FK1kQ6b4E= - -lodash.kebabcase@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz#8489b1cb0d29ff88195cceca448ff6d6cc295c36" - integrity sha1-hImxyw0p/4gZXM7KRI/21swpXDY= - -lodash.mapvalues@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/lodash.mapvalues/-/lodash.mapvalues-4.6.0.tgz#1bafa5005de9dd6f4f26668c30ca37230cc9689c" - integrity sha1-G6+lAF3p3W9PJmaMMMo3IwzJaJw= - -lodash.memoize@^4.1.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" - integrity sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4= - -lodash.transform@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/lodash.transform/-/lodash.transform-4.6.0.tgz#12306422f63324aed8483d3f38332b5f670547a0" - integrity sha1-EjBkIvYzJK7YSD0/ODMrX2cFR6A= - -lodash.uniq@^4.5.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" - integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M= - -lodash@^4.17.10, lodash@^4.17.11, lodash@^4.17.3, lodash@^4.17.4, lodash@^4.17.5, lodash@^4.3.0: - version "4.17.11" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.11.tgz#b39ea6229ef607ecd89e2c8df12536891cac9b8d" - integrity sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg== - -log-symbols@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a" - integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg== - dependencies: - chalk "^2.0.1" - -loglevel@^1.6.1: - version "1.6.1" - resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.1.tgz#e0fc95133b6ef276cdc8887cdaf24aa6f156f8fa" - integrity sha1-4PyVEztu8nbNyIh82vJKpvFW+Po= - -loose-envify@^1.0.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" - integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== - dependencies: - js-tokens "^3.0.0 || ^4.0.0" - -lower-case@^1.1.1: - version "1.1.4" - resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-1.1.4.tgz#9a2cabd1b9e8e0ae993a4bf7d5875c39c42e8eac" - integrity 
sha1-miyr0bno4K6ZOkv31YdcOcQujqw= - -lowercase-keys@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.0.tgz#4e3366b39e7f5457e35f1324bdf6f88d0bfc7306" - integrity sha1-TjNms55/VFfjXxMkvfb4jQv8cwY= - -lowercase-keys@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f" - integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA== - -lru-cache@^4.0.1, lru-cache@^4.1.1, lru-cache@^4.1.2: - version "4.1.5" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" - integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== - dependencies: - pseudomap "^1.0.2" - yallist "^2.1.2" - -lru-cache@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" - integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== - dependencies: - yallist "^3.0.2" - -make-dir@^1.0.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-1.3.0.tgz#79c1033b80515bd6d24ec9933e860ca75ee27f0c" - integrity sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ== - dependencies: - pify "^3.0.0" - -make-dir@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" - integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== - dependencies: - pify "^4.0.1" - semver "^5.6.0" - -map-age-cleaner@^0.1.1: - version "0.1.3" - resolved "https://registry.yarnpkg.com/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz#7d583a7306434c055fe474b0f45078e6e1b4b92a" - integrity sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w== - dependencies: - p-defer "^1.0.0" - -map-cache@^0.2.2: - version "0.2.2" - resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" - integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= - -map-visit@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" - integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= - dependencies: - object-visit "^1.0.0" - -md5.js@^1.3.4: - version "1.3.5" - resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" - integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== - dependencies: - hash-base "^3.0.0" - inherits "^2.0.1" - safe-buffer "^5.1.2" - -mdn-data@~1.1.0: - version "1.1.4" - resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-1.1.4.tgz#50b5d4ffc4575276573c4eedb8780812a8419f01" - integrity sha512-FSYbp3lyKjyj3E7fMl6rYvUdX0FBXaluGqlFoYESWQlyUTq8R+wp0rkFxoYFqZlHCvsUXGjyJmLQSnXToYhOSA== - -media-typer@0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" - integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= - -mem@^4.0.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/mem/-/mem-4.3.0.tgz#461af497bc4ae09608cdb2e60eefb69bff744178" - integrity 
sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w== - dependencies: - map-age-cleaner "^0.1.1" - mimic-fn "^2.0.0" - p-is-promise "^2.0.0" - -memory-fs@^0.4.0, memory-fs@^0.4.1, memory-fs@~0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552" - integrity sha1-OpoguEYlI+RHz7x+i7gO1me/xVI= - dependencies: - errno "^0.1.3" - readable-stream "^2.0.1" - -merge-descriptors@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" - integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= - -merge-source-map@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/merge-source-map/-/merge-source-map-1.1.0.tgz#2fdde7e6020939f70906a68f2d7ae685e4c8c646" - integrity sha512-Qkcp7P2ygktpMPh2mCQZaf3jhN6D3Z/qVZHSdWvQ+2Ef5HgRAPBO57A77+ENm0CPx2+1Ce/MYKi3ymqdfuqibw== - dependencies: - source-map "^0.6.1" - -merge2@^1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.2.3.tgz#7ee99dbd69bb6481689253f018488a1b902b0ed5" - integrity sha512-gdUU1Fwj5ep4kplwcmftruWofEFt6lfpkkr3h860CXbAB9c3hGb55EOL2ali0Td5oebvW0E1+3Sr+Ur7XfKpRA== - -methods@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" - integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= - -micromatch@^3.1.10, micromatch@^3.1.4, micromatch@^3.1.8: - version "3.1.10" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" - integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== - dependencies: - arr-diff "^4.0.0" - array-unique "^0.3.2" - braces "^2.3.1" - define-property "^2.0.2" - extend-shallow "^3.0.2" - extglob "^2.0.4" - fragment-cache "^0.2.1" - kind-of "^6.0.2" - nanomatch "^1.2.9" - object.pick "^1.3.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.2" - -miller-rabin@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d" - integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA== - dependencies: - bn.js "^4.0.0" - brorand "^1.0.1" - -mime-db@1.40.0, "mime-db@>= 1.40.0 < 2": - version "1.40.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.40.0.tgz#a65057e998db090f732a68f6c276d387d4126c32" - integrity sha512-jYdeOMPy9vnxEqFRRo6ZvTZ8d9oPb+k18PKoYNYUe2stVEBPPwsln/qWzdbmaIvnhZ9v2P+CuecK+fpUfsV2mA== - -mime-types@^2.1.12, mime-types@~2.1.17, mime-types@~2.1.19, mime-types@~2.1.24: - version "2.1.24" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.24.tgz#b6f8d0b3e951efb77dedeca194cff6d16f676f81" - integrity sha512-WaFHS3MCl5fapm3oLxU4eYDw77IQM2ACcxQ9RIxfaC3ooc6PFuBMGZZsYpvoXS5D5QTWPieo1jjLdAm3TBP3cQ== - dependencies: - mime-db "1.40.0" - -mime@1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" - integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== - -mime@^2.0.3, mime@^2.4.2: - version "2.4.3" - resolved "https://registry.yarnpkg.com/mime/-/mime-2.4.3.tgz#229687331e86f68924e6cb59e1cdd937f18275fe" - integrity sha512-QgrPRJfE+riq5TPZMcHZOtm8c6K/yYrMbKIoRfapfiGLxS8OTeIfRhUGW5LU7MlRa52KOAGCfUNruqLrIBvWZw== - 
-mimic-fn@^1.0.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" - integrity sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ== - -mimic-fn@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" - integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== - -mimic-response@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b" - integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ== - -mini-css-extract-plugin@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-0.6.0.tgz#a3f13372d6fcde912f3ee4cd039665704801e3b9" - integrity sha512-79q5P7YGI6rdnVyIAV4NXpBQJFWdkzJxCim3Kog4078fM0piAaFlwocqbejdWtLW1cEzCexPrh6EdyFsPgVdAw== - dependencies: - loader-utils "^1.1.0" - normalize-url "^2.0.1" - schema-utils "^1.0.0" - webpack-sources "^1.1.0" - -minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" - integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== - -minimalistic-crypto-utils@^1.0.0, minimalistic-crypto-utils@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" - integrity sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= - -minimatch@^3.0.2, minimatch@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" - integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== - dependencies: - brace-expansion "^1.1.7" - -minimist@0.0.8: - version "0.0.8" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" - integrity sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0= - -minimist@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" - integrity sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ= - -minipass@^2.2.1, minipass@^2.3.4: - version "2.3.5" - resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.3.5.tgz#cacebe492022497f656b0f0f51e2682a9ed2d848" - integrity sha512-Gi1W4k059gyRbyVUZQ4mEqLm0YIUiGYfvxhF6SIlk3ui1WVxMTGfGdQ2SInh3PDrRTVvPKgULkpJtT4RH10+VA== - dependencies: - safe-buffer "^5.1.2" - yallist "^3.0.0" - -minizlib@^1.1.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.2.1.tgz#dd27ea6136243c7c880684e8672bb3a45fd9b614" - integrity sha512-7+4oTUOWKg7AuL3vloEWekXY2/D20cevzsrNT2kGWm+39J9hGTCBv8VI5Pm5lXZ/o3/mdR4f8rflAPhnQb8mPA== - dependencies: - minipass "^2.2.1" - -mississippi@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-2.0.0.tgz#3442a508fafc28500486feea99409676e4ee5a6f" - integrity sha512-zHo8v+otD1J10j/tC+VNoGK9keCuByhKovAvdn74dmxJl9+mWHnx6EMsDN4lgRoMI/eYo2nchAxniIbUPb5onw== - dependencies: - concat-stream "^1.5.0" - duplexify "^3.4.2" - end-of-stream "^1.1.0" - flush-write-stream "^1.0.0" - from2 
"^2.1.0" - parallel-transform "^1.1.0" - pump "^2.0.1" - pumpify "^1.3.3" - stream-each "^1.1.0" - through2 "^2.0.0" - -mississippi@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022" - integrity sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA== - dependencies: - concat-stream "^1.5.0" - duplexify "^3.4.2" - end-of-stream "^1.1.0" - flush-write-stream "^1.0.0" - from2 "^2.1.0" - parallel-transform "^1.1.0" - pump "^3.0.0" - pumpify "^1.3.3" - stream-each "^1.1.0" - through2 "^2.0.0" - -mixin-deep@^1.2.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.1.tgz#a49e7268dce1a0d9698e45326c5626df3543d0fe" - integrity sha512-8ZItLHeEgaqEvd5lYBXfm4EZSFCX29Jb9K+lAHhDKzReKBQKj3R+7NOF6tjqYi9t4oI8VUfaWITJQm86wnXGNQ== - dependencies: - for-in "^1.0.2" - is-extendable "^1.0.1" - -mkdirp@0.5.1, mkdirp@0.5.x, mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@~0.5.0, mkdirp@~0.5.1: - version "0.5.1" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" - integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM= - dependencies: - minimist "0.0.8" - -move-concurrently@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/move-concurrently/-/move-concurrently-1.0.1.tgz#be2c005fda32e0b29af1f05d7c4b33214c701f92" - integrity sha1-viwAX9oy4LKa8fBdfEszIUxwH5I= - dependencies: - aproba "^1.1.1" - copy-concurrently "^1.0.0" - fs-write-stream-atomic "^1.0.8" - mkdirp "^0.5.1" - rimraf "^2.5.4" - run-queue "^1.0.3" - -ms@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" - integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= - -ms@2.1.1, ms@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" - integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== - -multicast-dns-service-types@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz#899f11d9686e5e05cb91b35d5f0e63b773cfc901" - integrity sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE= - -multicast-dns@^6.0.1: - version "6.2.3" - resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-6.2.3.tgz#a0ec7bd9055c4282f790c3c82f4e28db3b31b229" - integrity sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g== - dependencies: - dns-packet "^1.3.1" - thunky "^1.0.2" - -mute-stream@0.0.7: - version "0.0.7" - resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.7.tgz#3075ce93bc21b8fab43e1bc4da7e8115ed1e7bab" - integrity sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s= - -mz@^2.4.0: - version "2.7.0" - resolved "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32" - integrity sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q== - dependencies: - any-promise "^1.0.0" - object-assign "^4.0.1" - thenify-all "^1.0.0" - -nan@^2.12.1: - version "2.14.0" - resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c" - integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg== - -nanomatch@^1.2.9: - version "1.2.13" - resolved 
"https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" - integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== - dependencies: - arr-diff "^4.0.0" - array-unique "^0.3.2" - define-property "^2.0.2" - extend-shallow "^3.0.2" - fragment-cache "^0.2.1" - is-windows "^1.0.2" - kind-of "^6.0.2" - object.pick "^1.3.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - -natural-compare@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" - integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= - -needle@^2.2.1: - version "2.4.0" - resolved "https://registry.yarnpkg.com/needle/-/needle-2.4.0.tgz#6833e74975c444642590e15a750288c5f939b57c" - integrity sha512-4Hnwzr3mi5L97hMYeNl8wRW/Onhy4nUKR/lVemJ8gJedxxUyBLm9kkrDColJvoSfwi0jCNhD+xCdOtiGDQiRZg== - dependencies: - debug "^3.2.6" - iconv-lite "^0.4.4" - sax "^1.2.4" - -negotiator@0.6.2: - version "0.6.2" - resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" - integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== - -neo-async@^2.5.0, neo-async@^2.6.0: - version "2.6.1" - resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.1.tgz#ac27ada66167fa8849a6addd837f6b189ad2081c" - integrity sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw== - -nice-try@^1.0.4: - version "1.0.5" - resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" - integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== - -no-case@^2.2.0: - version "2.3.2" - resolved "https://registry.yarnpkg.com/no-case/-/no-case-2.3.2.tgz#60b813396be39b3f1288a4c1ed5d1e7d28b464ac" - integrity sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ== - dependencies: - lower-case "^1.1.1" - -node-forge@0.7.5: - version "0.7.5" - resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.7.5.tgz#6c152c345ce11c52f465c2abd957e8639cd674df" - integrity sha512-MmbQJ2MTESTjt3Gi/3yG1wGpIMhUfcIypUCGtTizFR9IiccFwxSpfp0vtIZlkFclEqERemxfnSdZEMR9VqqEFQ== - -node-ipc@^9.1.1: - version "9.1.1" - resolved "https://registry.yarnpkg.com/node-ipc/-/node-ipc-9.1.1.tgz#4e245ed6938e65100e595ebc5dc34b16e8dd5d69" - integrity sha512-FAyICv0sIRJxVp3GW5fzgaf9jwwRQxAKDJlmNFUL5hOy+W4X/I5AypyHoq0DXXbo9o/gt79gj++4cMr4jVWE/w== - dependencies: - event-pubsub "4.3.0" - js-message "1.0.5" - js-queue "2.0.0" - -node-libs-browser@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.2.0.tgz#c72f60d9d46de08a940dedbb25f3ffa2f9bbaa77" - integrity sha512-5MQunG/oyOaBdttrL40dA7bUfPORLRWMUJLQtMg7nluxUvk5XwnLdL9twQHFAjRx/y7mIMkLKT9++qPbbk6BZA== - dependencies: - assert "^1.1.1" - browserify-zlib "^0.2.0" - buffer "^4.3.0" - console-browserify "^1.1.0" - constants-browserify "^1.0.0" - crypto-browserify "^3.11.0" - domain-browser "^1.1.1" - events "^3.0.0" - https-browserify "^1.0.0" - os-browserify "^0.3.0" - path-browserify "0.0.0" - process "^0.11.10" - punycode "^1.2.4" - querystring-es3 "^0.2.0" - readable-stream "^2.3.3" - stream-browserify "^2.0.1" - stream-http "^2.7.2" - string_decoder "^1.0.0" - timers-browserify "^2.0.4" - tty-browserify "0.0.0" - url "^0.11.0" - util 
"^0.11.0" - vm-browserify "0.0.4" - -node-pre-gyp@^0.12.0: - version "0.12.0" - resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.12.0.tgz#39ba4bb1439da030295f899e3b520b7785766149" - integrity sha512-4KghwV8vH5k+g2ylT+sLTjy5wmUOb9vPhnM8NHvRf9dHmnW/CndrFXy2aRPaPST6dugXSdHXfeaHQm77PIz/1A== - dependencies: - detect-libc "^1.0.2" - mkdirp "^0.5.1" - needle "^2.2.1" - nopt "^4.0.1" - npm-packlist "^1.1.6" - npmlog "^4.0.2" - rc "^1.2.7" - rimraf "^2.6.1" - semver "^5.3.0" - tar "^4" - -node-releases@^1.1.19: - version "1.1.21" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.21.tgz#46c86f9adaceae4d63c75d3c2f2e6eee618e55f3" - integrity sha512-TwnURTCjc8a+ElJUjmDqU6+12jhli1Q61xOQmdZ7ECZVBZuQpN/1UnembiIHDM1wCcfLvh5wrWXUF5H6ufX64Q== - dependencies: - semver "^5.3.0" - -nopt@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d" - integrity sha1-0NRoWv1UFRk8jHUFYC0NF81kR00= - dependencies: - abbrev "1" - osenv "^0.1.4" - -normalize-package-data@^2.5.0: - version "2.5.0" - resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" - integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== - dependencies: - hosted-git-info "^2.1.4" - resolve "^1.10.0" - semver "2 || 3 || 4 || 5" - validate-npm-package-license "^3.0.1" - -normalize-path@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-1.0.0.tgz#32d0e472f91ff345701c15a8311018d3b0a90379" - integrity sha1-MtDkcvkf80VwHBWoMRAY07CpA3k= - -normalize-path@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" - integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= - dependencies: - remove-trailing-separator "^1.0.1" - -normalize-path@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" - integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== - -normalize-range@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" - integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= - -normalize-url@2.0.1, normalize-url@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-2.0.1.tgz#835a9da1551fa26f70e92329069a23aa6574d7e6" - integrity sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw== - dependencies: - prepend-http "^2.0.0" - query-string "^5.0.1" - sort-keys "^2.0.0" - -normalize-url@^3.0.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-3.3.0.tgz#b2e1c4dc4f7c6d57743df733a4f5978d18650559" - integrity sha512-U+JJi7duF1o+u2pynbp2zXDW2/PADgC30f0GsHZtRh+HOcXHnw137TrNlyxxRvWW5fjKd3bcLHPxofWuCjaeZg== - -npm-bundled@^1.0.1: - version "1.0.6" - resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.6.tgz#e7ba9aadcef962bb61248f91721cd932b3fe6bdd" - integrity sha512-8/JCaftHwbd//k6y2rEWp6k1wxVfpFzB6t1p825+cUb7Ym2XQfhwIC5KwhrvzZRJu+LtDE585zVaS32+CGtf0g== - -npm-packlist@^1.1.6: - version "1.4.1" - resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.1.tgz#19064cdf988da80ea3cee45533879d90192bbfbc" - integrity 
sha512-+TcdO7HJJ8peiiYhvPxsEDhF3PJFGUGRcFsGve3vxvxdcpO2Z4Z7rkosRM0kWj6LfbK/P0gu3dzk5RU1ffvFcw== - dependencies: - ignore-walk "^3.0.1" - npm-bundled "^1.0.1" - -npm-run-path@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" - integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= - dependencies: - path-key "^2.0.0" - -npmlog@^4.0.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" - integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg== - dependencies: - are-we-there-yet "~1.1.2" - console-control-strings "~1.1.0" - gauge "~2.7.3" - set-blocking "~2.0.0" - -nth-check@^1.0.2, nth-check@~1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" - integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== - dependencies: - boolbase "~1.0.0" - -num2fraction@^1.2.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede" - integrity sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4= - -number-is-nan@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" - integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= - -oauth-sign@~0.9.0: - version "0.9.0" - resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" - integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== - -object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" - integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= - -object-copy@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" - integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= - dependencies: - copy-descriptor "^0.1.0" - define-property "^0.2.5" - kind-of "^3.0.3" - -object-hash@^1.1.4: - version "1.3.1" - resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-1.3.1.tgz#fde452098a951cb145f039bb7d455449ddc126df" - integrity sha512-OSuu/pU4ENM9kmREg0BdNrUDIl1heYa4mBZacJc+vVWz4GtAwu7jO8s4AIt2aGRUTqxykpWzI3Oqnsm13tTMDA== - -object-keys@^1.0.11, object-keys@^1.0.12: - version "1.1.1" - resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" - integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== - -object-visit@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" - integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= - dependencies: - isobject "^3.0.0" - -object.assign@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" - integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w== - dependencies: - define-properties "^1.1.2" - function-bind "^1.1.1" - has-symbols "^1.0.0" - object-keys "^1.0.11" - 
-object.getownpropertydescriptors@^2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz#8758c846f5b407adab0f236e0986f14b051caa16" - integrity sha1-h1jIRvW0B62rDyNuCYbxSwUcqhY= - dependencies: - define-properties "^1.1.2" - es-abstract "^1.5.1" - -object.pick@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" - integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= - dependencies: - isobject "^3.0.1" - -object.values@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.0.tgz#bf6810ef5da3e5325790eaaa2be213ea84624da9" - integrity sha512-8mf0nKLAoFX6VlNVdhGj31SVYpaNFtUnuoOXWyFEstsWRgU837AK+JYM0iAxwkSzGRbwn8cbFmgbyxj1j4VbXg== - dependencies: - define-properties "^1.1.3" - es-abstract "^1.12.0" - function-bind "^1.1.1" - has "^1.0.3" - -obuf@^1.0.0, obuf@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" - integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== - -on-finished@~2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" - integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= - dependencies: - ee-first "1.1.1" - -on-headers@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" - integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== - -once@^1.3.0, once@^1.3.1, once@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= - dependencies: - wrappy "1" - -onetime@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" - integrity sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ= - dependencies: - mimic-fn "^1.0.0" - -opener@^1.5.1: - version "1.5.1" - resolved "https://registry.yarnpkg.com/opener/-/opener-1.5.1.tgz#6d2f0e77f1a0af0032aca716c2c1fbb8e7e8abed" - integrity sha512-goYSy5c2UXE4Ra1xixabeVh1guIX/ZV/YokJksb6q2lubWu6UbvPQ20p542/sFIll1nl8JnCyK9oBaOcCWXwvA== - -opn@^5.3.0, opn@^5.5.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/opn/-/opn-5.5.0.tgz#fc7164fab56d235904c51c3b27da6758ca3b9bfc" - integrity sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA== - dependencies: - is-wsl "^1.1.0" - -optionator@^0.8.2: - version "0.8.2" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.2.tgz#364c5e409d3f4d6301d6c0b4c05bba50180aeb64" - integrity sha1-NkxeQJ0/TWMB1sC0wFu6UBgK62Q= - dependencies: - deep-is "~0.1.3" - fast-levenshtein "~2.0.4" - levn "~0.3.0" - prelude-ls "~1.1.2" - type-check "~0.3.2" - wordwrap "~1.0.0" - -ora@^3.4.0: - version "3.4.0" - resolved "https://registry.yarnpkg.com/ora/-/ora-3.4.0.tgz#bf0752491059a3ef3ed4c85097531de9fdbcd318" - integrity sha512-eNwHudNbO1folBP3JsZ19v9azXWtQZjICdr3Q0TDPIaeBQ3mXLrh54wM+er0+hSp+dWKf+Z8KM58CYzEyIYxYg== - dependencies: - chalk "^2.4.2" - cli-cursor "^2.1.0" - cli-spinners "^2.0.0" - log-symbols "^2.2.0" - strip-ansi "^5.2.0" - wcwidth "^1.0.1" - -original@^1.0.0: - version "1.0.2" - resolved 
"https://registry.yarnpkg.com/original/-/original-1.0.2.tgz#e442a61cffe1c5fd20a65f3261c26663b303f25f" - integrity sha512-hyBVl6iqqUOJ8FqRe+l/gS8H+kKYjrEndd5Pm1MfBtsEKA038HkkdbAl/72EAXGyonD/PFsvmVG+EvcIpliMBg== - dependencies: - url-parse "^1.4.3" - -os-browserify@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27" - integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc= - -os-homedir@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" - integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= - -os-locale@^3.0.0, os-locale@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a" - integrity sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q== - dependencies: - execa "^1.0.0" - lcid "^2.0.0" - mem "^4.0.0" - -os-tmpdir@^1.0.0, os-tmpdir@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" - integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= - -osenv@^0.1.4: - version "0.1.5" - resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" - integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g== - dependencies: - os-homedir "^1.0.0" - os-tmpdir "^1.0.0" - -p-cancelable@^0.4.0: - version "0.4.1" - resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-0.4.1.tgz#35f363d67d52081c8d9585e37bcceb7e0bbcb2a0" - integrity sha512-HNa1A8LvB1kie7cERyy21VNeHb2CWJJYqyyC2o3klWFfMGlFmWv2Z7sFgZH8ZiaYL95ydToKTFVXgMV/Os0bBQ== - -p-defer@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c" - integrity sha1-n26xgvbJqozXQwBKfU+WsZaw+ww= - -p-finally@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" - integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= - -p-is-promise@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-1.1.0.tgz#9c9456989e9f6588017b0434d56097675c3da05e" - integrity sha1-nJRWmJ6fZYgBewQ01WCXZ1w9oF4= - -p-is-promise@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-2.1.0.tgz#918cebaea248a62cf7ffab8e3bca8c5f882fc42e" - integrity sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg== - -p-limit@^1.0.0, p-limit@^1.1.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8" - integrity sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q== - dependencies: - p-try "^1.0.0" - -p-limit@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.0.tgz#417c9941e6027a9abcba5092dd2904e255b5fbc2" - integrity sha512-pZbTJpoUsCzV48Mc9Nh51VbwO0X9cuPFE8gYwx9BTCt9SF8/b7Zljd2fVgOxhIF/HDTKgpVzs+GPhyKfjLLFRQ== - dependencies: - p-try "^2.0.0" - -p-locate@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" - integrity sha1-IKAQOyIqcMj9OcwuWAaA893l7EM= - dependencies: - p-limit "^1.1.0" - -p-locate@^3.0.0: - version "3.0.0" - resolved 
"https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" - integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== - dependencies: - p-limit "^2.0.0" - -p-map@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" - integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== - -p-timeout@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-2.0.1.tgz#d8dd1979595d2dc0139e1fe46b8b646cb3cdf038" - integrity sha512-88em58dDVB/KzPEx1X0N3LwFfYZPyDc4B6eF38M1rk9VTZMbxXXgjugz8mmwpS9Ox4BDZ+t6t3QP5+/gazweIA== - dependencies: - p-finally "^1.0.0" - -p-try@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" - integrity sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M= - -p-try@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" - integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== - -pako@~1.0.5: - version "1.0.10" - resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.10.tgz#4328badb5086a426aa90f541977d4955da5c9732" - integrity sha512-0DTvPVU3ed8+HNXOu5Bs+o//Mbdj9VNQMUOe9oKCwh8l0GNwpTDMKCWbRjgtD291AWnkAgkqA/LOnQS8AmS1tw== - -parallel-transform@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/parallel-transform/-/parallel-transform-1.1.0.tgz#d410f065b05da23081fcd10f28854c29bda33b06" - integrity sha1-1BDwZbBdojCB/NEPKIVMKb2jOwY= - dependencies: - cyclist "~0.2.2" - inherits "^2.0.3" - readable-stream "^2.1.5" - -param-case@2.1.x: - version "2.1.1" - resolved "https://registry.yarnpkg.com/param-case/-/param-case-2.1.1.tgz#df94fd8cf6531ecf75e6bef9a0858fbc72be2247" - integrity sha1-35T9jPZTHs915r75oIWPvHK+Ikc= - dependencies: - no-case "^2.2.0" - -parse-asn1@^5.0.0: - version "5.1.4" - resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.4.tgz#37f6628f823fbdeb2273b4d540434a22f3ef1fcc" - integrity sha512-Qs5duJcuvNExRfFZ99HDD3z4mAi3r9Wl/FOjEOijlxwCZs7E7mW2vjTpgQ4J8LpTF8x5v+1Vn5UQFejmWT11aw== - dependencies: - asn1.js "^4.0.0" - browserify-aes "^1.0.0" - create-hash "^1.1.0" - evp_bytestokey "^1.0.0" - pbkdf2 "^3.0.3" - safe-buffer "^5.1.1" - -parse-json@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0" - integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA= - dependencies: - error-ex "^1.3.1" - json-parse-better-errors "^1.0.1" - -parse5@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/parse5/-/parse5-4.0.0.tgz#6d78656e3da8d78b4ec0b906f7c08ef1dfe3f608" - integrity sha512-VrZ7eOd3T1Fk4XWNXMgiGBK/z0MG48BWG2uQNU4I72fkQuKUTZpl+u9k+CxEG0twMVzSmXEEz12z5Fnw1jIQFA== - -parseurl@~1.3.2, parseurl@~1.3.3: - version "1.3.3" - resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" - integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== - -pascalcase@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" - integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= - -path-browserify@0.0.0: - version "0.0.0" - resolved 
"https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.0.tgz#a0b870729aae214005b7d5032ec2cbbb0fb4451a" - integrity sha1-oLhwcpquIUAFt9UDLsLLuw+0RRo= - -path-dirname@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" - integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= - -path-exists@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" - integrity sha1-D+tsZPD8UY2adU3V77YscCJ2H0s= - dependencies: - pinkie-promise "^2.0.0" - -path-exists@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" - integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= - -path-is-absolute@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" - integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= - -path-is-inside@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" - integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM= - -path-key@^2.0.0, path-key@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" - integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= - -path-parse@^1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" - integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== - -path-to-regexp@0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" - integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= - -path-type@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-3.0.0.tgz#cef31dc8e0a1a3bb0d105c0cd97cf3bf47f4e36f" - integrity sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg== - dependencies: - pify "^3.0.0" - -pbkdf2@^3.0.3: - version "3.0.17" - resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.17.tgz#976c206530617b14ebb32114239f7b09336e93a6" - integrity sha512-U/il5MsrZp7mGg3mSQfn742na2T+1/vHDCG5/iTI3X9MKUuYUZVLQhyRsg06mCgDBTd57TxzgZt7P+fYfjRLtA== - dependencies: - create-hash "^1.1.2" - create-hmac "^1.1.4" - ripemd160 "^2.0.1" - safe-buffer "^5.0.1" - sha.js "^2.4.8" - -performance-now@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" - integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= - -pify@^2.0.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" - integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= - -pify@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" - integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY= - -pify@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" - integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== - -pinkie-promise@^2.0.0: - version "2.0.1" - resolved 
"https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" - integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o= - dependencies: - pinkie "^2.0.0" - -pinkie@^2.0.0: - version "2.0.4" - resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" - integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= - -pkg-dir@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-1.0.0.tgz#7a4b508a8d5bb2d629d447056ff4e9c9314cf3d4" - integrity sha1-ektQio1bstYp1EcFb/TpyTFM89Q= - dependencies: - find-up "^1.0.0" - -pkg-dir@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-2.0.0.tgz#f6d5d1109e19d63edf428e0bd57e12777615334b" - integrity sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s= - dependencies: - find-up "^2.1.0" - -pkg-dir@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3" - integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== - dependencies: - find-up "^3.0.0" - -pkg-up@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-2.0.0.tgz#c819ac728059a461cab1c3889a2be3c49a004d7f" - integrity sha1-yBmscoBZpGHKscOImivjxJoATX8= - dependencies: - find-up "^2.1.0" - -pluralize@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-7.0.0.tgz#298b89df8b93b0221dbf421ad2b1b1ea23fc6777" - integrity sha512-ARhBOdzS3e41FbkW/XWrTEtukqqLoK5+Z/4UeDaLuSW+39JPeFgs4gCGqsrJHVZX0fUrx//4OF0K1CUGwlIFow== - -portfinder@^1.0.20: - version "1.0.20" - resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.20.tgz#bea68632e54b2e13ab7b0c4775e9b41bf270e44a" - integrity sha512-Yxe4mTyDzTd59PZJY4ojZR8F+E5e97iq2ZOHPz3HDgSvYC5siNad2tLooQ5y5QHyQhc3xVqvyk/eNA3wuoa7Sw== - dependencies: - async "^1.5.2" - debug "^2.2.0" - mkdirp "0.5.x" - -posix-character-classes@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" - integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= - -postcss-calc@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-7.0.1.tgz#36d77bab023b0ecbb9789d84dcb23c4941145436" - integrity sha512-oXqx0m6tb4N3JGdmeMSc/i91KppbYsFZKdH0xMOqK8V1rJlzrKlTdokz8ozUXLVejydRN6u2IddxpcijRj2FqQ== - dependencies: - css-unit-converter "^1.1.1" - postcss "^7.0.5" - postcss-selector-parser "^5.0.0-rc.4" - postcss-value-parser "^3.3.1" - -postcss-colormin@^4.0.3: - version "4.0.3" - resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-4.0.3.tgz#ae060bce93ed794ac71264f08132d550956bd381" - integrity sha512-WyQFAdDZpExQh32j0U0feWisZ0dmOtPl44qYmJKkq9xFWY3p+4qnRzCHeNrkeRhwPHz9bQ3mo0/yVkaply0MNw== - dependencies: - browserslist "^4.0.0" - color "^3.0.0" - has "^1.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - -postcss-convert-values@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-convert-values/-/postcss-convert-values-4.0.1.tgz#ca3813ed4da0f812f9d43703584e449ebe189a7f" - integrity sha512-Kisdo1y77KUC0Jmn0OXU/COOJbzM8cImvw1ZFsBgBgMgb1iL23Zs/LXRe3r+EZqM3vGYKdQ2YJVQ5VkJI+zEJQ== - dependencies: - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - -postcss-discard-comments@^4.0.2: - version "4.0.2" - resolved 
"https://registry.yarnpkg.com/postcss-discard-comments/-/postcss-discard-comments-4.0.2.tgz#1fbabd2c246bff6aaad7997b2b0918f4d7af4033" - integrity sha512-RJutN259iuRf3IW7GZyLM5Sw4GLTOH8FmsXBnv8Ab/Tc2k4SR4qbV4DNbyyY4+Sjo362SyDmW2DQ7lBSChrpkg== - dependencies: - postcss "^7.0.0" - -postcss-discard-duplicates@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-discard-duplicates/-/postcss-discard-duplicates-4.0.2.tgz#3fe133cd3c82282e550fc9b239176a9207b784eb" - integrity sha512-ZNQfR1gPNAiXZhgENFfEglF93pciw0WxMkJeVmw8eF+JZBbMD7jp6C67GqJAXVZP2BWbOztKfbsdmMp/k8c6oQ== - dependencies: - postcss "^7.0.0" - -postcss-discard-empty@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-discard-empty/-/postcss-discard-empty-4.0.1.tgz#c8c951e9f73ed9428019458444a02ad90bb9f765" - integrity sha512-B9miTzbznhDjTfjvipfHoqbWKwd0Mj+/fL5s1QOz06wufguil+Xheo4XpOnc4NqKYBCNqqEzgPv2aPBIJLox0w== - dependencies: - postcss "^7.0.0" - -postcss-discard-overridden@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-4.0.1.tgz#652aef8a96726f029f5e3e00146ee7a4e755ff57" - integrity sha512-IYY2bEDD7g1XM1IDEsUT4//iEYCxAmP5oDSFMVU/JVvT7gh+l4fmjciLqGgwjdWpQIdb0Che2VX00QObS5+cTg== - dependencies: - postcss "^7.0.0" - -postcss-load-config@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-2.0.0.tgz#f1312ddbf5912cd747177083c5ef7a19d62ee484" - integrity sha512-V5JBLzw406BB8UIfsAWSK2KSwIJ5yoEIVFb4gVkXci0QdKgA24jLmHZ/ghe/GgX0lJ0/D1uUK1ejhzEY94MChQ== - dependencies: - cosmiconfig "^4.0.0" - import-cwd "^2.0.0" - -postcss-loader@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-3.0.0.tgz#6b97943e47c72d845fa9e03f273773d4e8dd6c2d" - integrity sha512-cLWoDEY5OwHcAjDnkyRQzAXfs2jrKjXpO/HQFcc5b5u/r7aa471wdmChmwfnv7x2u840iat/wi0lQ5nbRgSkUA== - dependencies: - loader-utils "^1.1.0" - postcss "^7.0.0" - postcss-load-config "^2.0.0" - schema-utils "^1.0.0" - -postcss-merge-longhand@^4.0.11: - version "4.0.11" - resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-4.0.11.tgz#62f49a13e4a0ee04e7b98f42bb16062ca2549e24" - integrity sha512-alx/zmoeXvJjp7L4mxEMjh8lxVlDFX1gqWHzaaQewwMZiVhLo42TEClKaeHbRf6J7j82ZOdTJ808RtN0ZOZwvw== - dependencies: - css-color-names "0.0.4" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - stylehacks "^4.0.0" - -postcss-merge-rules@^4.0.3: - version "4.0.3" - resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-4.0.3.tgz#362bea4ff5a1f98e4075a713c6cb25aefef9a650" - integrity sha512-U7e3r1SbvYzO0Jr3UT/zKBVgYYyhAz0aitvGIYOYK5CPmkNih+WDSsS5tvPrJ8YMQYlEMvsZIiqmn7HdFUaeEQ== - dependencies: - browserslist "^4.0.0" - caniuse-api "^3.0.0" - cssnano-util-same-parent "^4.0.0" - postcss "^7.0.0" - postcss-selector-parser "^3.0.0" - vendors "^1.0.0" - -postcss-minify-font-values@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-minify-font-values/-/postcss-minify-font-values-4.0.2.tgz#cd4c344cce474343fac5d82206ab2cbcb8afd5a6" - integrity sha512-j85oO6OnRU9zPf04+PZv1LYIYOprWm6IA6zkXkrJXyRveDEuQggG6tvoy8ir8ZwjLxLuGfNkCZEQG7zan+Hbtg== - dependencies: - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - -postcss-minify-gradients@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-minify-gradients/-/postcss-minify-gradients-4.0.2.tgz#93b29c2ff5099c535eecda56c4aa6e665a663471" - integrity 
sha512-qKPfwlONdcf/AndP1U8SJ/uzIJtowHlMaSioKzebAXSG4iJthlWC9iSWznQcX4f66gIWX44RSA841HTHj3wK+Q== - dependencies: - cssnano-util-get-arguments "^4.0.0" - is-color-stop "^1.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - -postcss-minify-params@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-minify-params/-/postcss-minify-params-4.0.2.tgz#6b9cef030c11e35261f95f618c90036d680db874" - integrity sha512-G7eWyzEx0xL4/wiBBJxJOz48zAKV2WG3iZOqVhPet/9geefm/Px5uo1fzlHu+DOjT+m0Mmiz3jkQzVHe6wxAWg== - dependencies: - alphanum-sort "^1.0.0" - browserslist "^4.0.0" - cssnano-util-get-arguments "^4.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - uniqs "^2.0.0" - -postcss-minify-selectors@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-minify-selectors/-/postcss-minify-selectors-4.0.2.tgz#e2e5eb40bfee500d0cd9243500f5f8ea4262fbd8" - integrity sha512-D5S1iViljXBj9kflQo4YutWnJmwm8VvIsU1GeXJGiG9j8CIg9zs4voPMdQDUmIxetUOh60VilsNzCiAFTOqu3g== - dependencies: - alphanum-sort "^1.0.0" - has "^1.0.0" - postcss "^7.0.0" - postcss-selector-parser "^3.0.0" - -postcss-modules-extract-imports@^1.2.0: - version "1.2.1" - resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-1.2.1.tgz#dc87e34148ec7eab5f791f7cd5849833375b741a" - integrity sha512-6jt9XZwUhwmRUhb/CkyJY020PYaPJsCyt3UjbaWo6XEbH/94Hmv6MP7fG2C5NDU/BcHzyGYxNtHvM+LTf9HrYw== - dependencies: - postcss "^6.0.1" - -postcss-modules-local-by-default@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-1.2.0.tgz#f7d80c398c5a393fa7964466bd19500a7d61c069" - integrity sha1-99gMOYxaOT+nlkRmvRlQCn1hwGk= - dependencies: - css-selector-tokenizer "^0.7.0" - postcss "^6.0.1" - -postcss-modules-scope@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-1.1.0.tgz#d6ea64994c79f97b62a72b426fbe6056a194bb90" - integrity sha1-1upkmUx5+XtipytCb75gVqGUu5A= - dependencies: - css-selector-tokenizer "^0.7.0" - postcss "^6.0.1" - -postcss-modules-values@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-1.3.0.tgz#ecffa9d7e192518389f42ad0e83f72aec456ea20" - integrity sha1-7P+p1+GSUYOJ9CrQ6D9yrsRW6iA= - dependencies: - icss-replace-symbols "^1.1.0" - postcss "^6.0.1" - -postcss-normalize-charset@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-4.0.1.tgz#8b35add3aee83a136b0471e0d59be58a50285dd4" - integrity sha512-gMXCrrlWh6G27U0hF3vNvR3w8I1s2wOBILvA87iNXaPvSNo5uZAMYsZG7XjCUf1eVxuPfyL4TJ7++SGZLc9A3g== - dependencies: - postcss "^7.0.0" - -postcss-normalize-display-values@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-normalize-display-values/-/postcss-normalize-display-values-4.0.2.tgz#0dbe04a4ce9063d4667ed2be476bb830c825935a" - integrity sha512-3F2jcsaMW7+VtRMAqf/3m4cPFhPD3EFRgNs18u+k3lTJJlVe7d0YPO+bnwqo2xg8YiRpDXJI2u8A0wqJxMsQuQ== - dependencies: - cssnano-util-get-match "^4.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - -postcss-normalize-positions@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-normalize-positions/-/postcss-normalize-positions-4.0.2.tgz#05f757f84f260437378368a91f8932d4b102917f" - integrity sha512-Dlf3/9AxpxE+NF1fJxYDeggi5WwV35MXGFnnoccP/9qDtFrTArZ0D0R+iKcg5WsUd8nUYMIl8yXDCtcrT8JrdA== - dependencies: - 
cssnano-util-get-arguments "^4.0.0" - has "^1.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - -postcss-normalize-repeat-style@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-4.0.2.tgz#c4ebbc289f3991a028d44751cbdd11918b17910c" - integrity sha512-qvigdYYMpSuoFs3Is/f5nHdRLJN/ITA7huIoCyqqENJe9PvPmLhNLMu7QTjPdtnVf6OcYYO5SHonx4+fbJE1+Q== - dependencies: - cssnano-util-get-arguments "^4.0.0" - cssnano-util-get-match "^4.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - -postcss-normalize-string@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-normalize-string/-/postcss-normalize-string-4.0.2.tgz#cd44c40ab07a0c7a36dc5e99aace1eca4ec2690c" - integrity sha512-RrERod97Dnwqq49WNz8qo66ps0swYZDSb6rM57kN2J+aoyEAJfZ6bMx0sx/F9TIEX0xthPGCmeyiam/jXif0eA== - dependencies: - has "^1.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - -postcss-normalize-timing-functions@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-4.0.2.tgz#8e009ca2a3949cdaf8ad23e6b6ab99cb5e7d28d9" - integrity sha512-acwJY95edP762e++00Ehq9L4sZCEcOPyaHwoaFOhIwWCDfik6YvqsYNxckee65JHLKzuNSSmAdxwD2Cud1Z54A== - dependencies: - cssnano-util-get-match "^4.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - -postcss-normalize-unicode@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-normalize-unicode/-/postcss-normalize-unicode-4.0.1.tgz#841bd48fdcf3019ad4baa7493a3d363b52ae1cfb" - integrity sha512-od18Uq2wCYn+vZ/qCOeutvHjB5jm57ToxRaMeNuf0nWVHaP9Hua56QyMF6fs/4FSUnVIw0CBPsU0K4LnBPwYwg== - dependencies: - browserslist "^4.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - -postcss-normalize-url@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-normalize-url/-/postcss-normalize-url-4.0.1.tgz#10e437f86bc7c7e58f7b9652ed878daaa95faae1" - integrity sha512-p5oVaF4+IHwu7VpMan/SSpmpYxcJMtkGppYf0VbdH5B6hN8YNmVyJLuY9FmLQTzY3fag5ESUUHDqM+heid0UVA== - dependencies: - is-absolute-url "^2.0.0" - normalize-url "^3.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - -postcss-normalize-whitespace@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-normalize-whitespace/-/postcss-normalize-whitespace-4.0.2.tgz#bf1d4070fe4fcea87d1348e825d8cc0c5faa7d82" - integrity sha512-tO8QIgrsI3p95r8fyqKV+ufKlSHh9hMJqACqbv2XknufqEDhDvbguXGBBqxw9nsQoXWf0qOqppziKJKHMD4GtA== - dependencies: - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - -postcss-ordered-values@^4.1.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/postcss-ordered-values/-/postcss-ordered-values-4.1.2.tgz#0cf75c820ec7d5c4d280189559e0b571ebac0eee" - integrity sha512-2fCObh5UanxvSxeXrtLtlwVThBvHn6MQcu4ksNT2tsaV2Fg76R2CV98W7wNSlX+5/pFwEyaDwKLLoEV7uRybAw== - dependencies: - cssnano-util-get-arguments "^4.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - -postcss-reduce-initial@^4.0.3: - version "4.0.3" - resolved "https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-4.0.3.tgz#7fd42ebea5e9c814609639e2c2e84ae270ba48df" - integrity sha512-gKWmR5aUulSjbzOfD9AlJiHCGH6AEVLaM0AV+aSioxUDd16qXP1PCh8d1/BGVvpdWn8k/HiK7n6TjeoXN1F7DA== - dependencies: - browserslist "^4.0.0" - caniuse-api "^3.0.0" - has "^1.0.0" - postcss "^7.0.0" - -postcss-reduce-transforms@^4.0.2: - version "4.0.2" - resolved 
"https://registry.yarnpkg.com/postcss-reduce-transforms/-/postcss-reduce-transforms-4.0.2.tgz#17efa405eacc6e07be3414a5ca2d1074681d4e29" - integrity sha512-EEVig1Q2QJ4ELpJXMZR8Vt5DQx8/mo+dGWSR7vWXqcob2gQLyQGsionYcGKATXvQzMPn6DSN1vTN7yFximdIAg== - dependencies: - cssnano-util-get-match "^4.0.0" - has "^1.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - -postcss-selector-parser@^3.0.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-3.1.1.tgz#4f875f4afb0c96573d5cf4d74011aee250a7e865" - integrity sha1-T4dfSvsMllc9XPTXQBGu4lCn6GU= - dependencies: - dot-prop "^4.1.1" - indexes-of "^1.0.1" - uniq "^1.0.1" - -postcss-selector-parser@^5.0.0, postcss-selector-parser@^5.0.0-rc.4: - version "5.0.0" - resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-5.0.0.tgz#249044356697b33b64f1a8f7c80922dddee7195c" - integrity sha512-w+zLE5Jhg6Liz8+rQOWEAwtwkyqpfnmsinXjXg6cY7YIONZZtgvE0v2O0uhQBs0peNomOJwWRKt6JBfTdTd3OQ== - dependencies: - cssesc "^2.0.0" - indexes-of "^1.0.1" - uniq "^1.0.1" - -postcss-svgo@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-4.0.2.tgz#17b997bc711b333bab143aaed3b8d3d6e3d38258" - integrity sha512-C6wyjo3VwFm0QgBy+Fu7gCYOkCmgmClghO+pjcxvrcBKtiKt0uCF+hvbMO1fyv5BMImRK90SMb+dwUnfbGd+jw== - dependencies: - is-svg "^3.0.0" - postcss "^7.0.0" - postcss-value-parser "^3.0.0" - svgo "^1.0.0" - -postcss-unique-selectors@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-unique-selectors/-/postcss-unique-selectors-4.0.1.tgz#9446911f3289bfd64c6d680f073c03b1f9ee4bac" - integrity sha512-+JanVaryLo9QwZjKrmJgkI4Fn8SBgRO6WXQBJi7KiAVPlmxikB5Jzc4EvXMT2H0/m0RjrVVm9rGNhZddm/8Spg== - dependencies: - alphanum-sort "^1.0.0" - postcss "^7.0.0" - uniqs "^2.0.0" - -postcss-value-parser@^3.0.0, postcss-value-parser@^3.3.0, postcss-value-parser@^3.3.1: - version "3.3.1" - resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz#9ff822547e2893213cf1c30efa51ac5fd1ba8281" - integrity sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ== - -postcss@^6.0.1, postcss@^6.0.23: - version "6.0.23" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-6.0.23.tgz#61c82cc328ac60e677645f979054eb98bc0e3324" - integrity sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag== - dependencies: - chalk "^2.4.1" - source-map "^0.6.1" - supports-color "^5.4.0" - -postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.14, postcss@^7.0.5: - version "7.0.16" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.16.tgz#48f64f1b4b558cb8b52c88987724359acb010da2" - integrity sha512-MOo8zNSlIqh22Uaa3drkdIAgUGEL+AD1ESiSdmElLUmE2uVDo1QloiT/IfW9qRw8Gw+Y/w69UVMGwbufMSftxA== - dependencies: - chalk "^2.4.2" - source-map "^0.6.1" - supports-color "^6.1.0" - -prelude-ls@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" - integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= - -prepend-http@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" - integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= - -prettier@1.16.3: - version "1.16.3" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-1.16.3.tgz#8c62168453badef702f34b45b6ee899574a6a65d" - integrity 
sha512-kn/GU6SMRYPxUakNXhpP0EedT/KmaPzr0H5lIsDogrykbaxOpOfAFfk5XA7DZrJyMAv1wlMV3CPcZruGXVVUZw== - -pretty-error@^2.0.2: - version "2.1.1" - resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-2.1.1.tgz#5f4f87c8f91e5ae3f3ba87ab4cf5e03b1a17f1a3" - integrity sha1-X0+HyPkeWuPzuoerTPXgOxoX8aM= - dependencies: - renderkid "^2.0.1" - utila "~0.4" - -private@^0.1.6: - version "0.1.8" - resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" - integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg== - -process-nextick-args@~2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.0.tgz#a37d732f4271b4ab1ad070d35508e8290788ffaa" - integrity sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw== - -process@^0.11.10: - version "0.11.10" - resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" - integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI= - -progress@^2.0.0: - version "2.0.3" - resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" - integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== - -promise-inflight@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" - integrity sha1-mEcocL8igTL8vdhoEputEsPAKeM= - -proxy-addr@~2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.5.tgz#34cbd64a2d81f4b1fd21e76f9f06c8a45299ee34" - integrity sha512-t/7RxHXPH6cJtP0pRG6smSr9QJidhB+3kXu0KgXnbGYMgzEnUxRQ4/LDdfOwZEMyIh3/xHb8PX3t+lfL9z+YVQ== - dependencies: - forwarded "~0.1.2" - ipaddr.js "1.9.0" - -prr@~1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" - integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= - -pseudomap@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" - integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM= - -psl@^1.1.24, psl@^1.1.28: - version "1.1.31" - resolved "https://registry.yarnpkg.com/psl/-/psl-1.1.31.tgz#e9aa86d0101b5b105cbe93ac6b784cd547276184" - integrity sha512-/6pt4+C+T+wZUieKR620OpzN/LlnNKuWjy1iFLQ/UG35JqHlR/89MP1d96dUfkf6Dne3TuLQzOYEYshJ+Hx8mw== - -public-encrypt@^4.0.0: - version "4.0.3" - resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0" - integrity sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q== - dependencies: - bn.js "^4.1.0" - browserify-rsa "^4.0.0" - create-hash "^1.1.0" - parse-asn1 "^5.0.0" - randombytes "^2.0.1" - safe-buffer "^5.1.2" - -pump@^2.0.0, pump@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909" - integrity sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA== - dependencies: - end-of-stream "^1.1.0" - once "^1.3.1" - -pump@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" - integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== - 
dependencies: - end-of-stream "^1.1.0" - once "^1.3.1" - -pumpify@^1.3.3: - version "1.5.1" - resolved "https://registry.yarnpkg.com/pumpify/-/pumpify-1.5.1.tgz#36513be246ab27570b1a374a5ce278bfd74370ce" - integrity sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ== - dependencies: - duplexify "^3.6.0" - inherits "^2.0.3" - pump "^2.0.0" - -punycode@1.3.2: - version "1.3.2" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" - integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= - -punycode@2.x.x, punycode@^2.1.0, punycode@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" - integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== - -punycode@^1.2.4, punycode@^1.4.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" - integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= - -q@^1.1.2: - version "1.5.1" - resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" - integrity sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc= - -qs@6.7.0: - version "6.7.0" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc" - integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== - -qs@~6.5.2: - version "6.5.2" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" - integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== - -query-string@^5.0.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/query-string/-/query-string-5.1.1.tgz#a78c012b71c17e05f2e3fa2319dd330682efb3cb" - integrity sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw== - dependencies: - decode-uri-component "^0.2.0" - object-assign "^4.1.0" - strict-uri-encode "^1.0.0" - -querystring-es3@^0.2.0: - version "0.2.1" - resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73" - integrity sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM= - -querystring@0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" - integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= - -querystringify@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.1.1.tgz#60e5a5fd64a7f8bfa4d2ab2ed6fdf4c85bad154e" - integrity sha512-w7fLxIRCRT7U8Qu53jQnJyPkYZIaR4n5151KMfcJlO/A9397Wxb1amJvROTK6TOnp7PfoAmg/qXiNHI+08jRfA== - -randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5: - version "2.1.0" - resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" - integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== - dependencies: - safe-buffer "^5.1.0" - -randomfill@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458" - integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw== - dependencies: - randombytes "^2.0.5" - safe-buffer "^5.1.0" - -range-parser@^1.2.1, range-parser@~1.2.1: - version "1.2.1" - 
resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" - integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== - -raw-body@2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.0.tgz#a1ce6fb9c9bc356ca52e89256ab59059e13d0332" - integrity sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q== - dependencies: - bytes "3.1.0" - http-errors "1.7.2" - iconv-lite "0.4.24" - unpipe "1.0.0" - -rc@^1.2.7: - version "1.2.8" - resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" - integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== - dependencies: - deep-extend "^0.6.0" - ini "~1.3.0" - minimist "^1.2.0" - strip-json-comments "~2.0.1" - -read-pkg@^5.0.0: - version "5.1.1" - resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-5.1.1.tgz#5cf234dde7a405c90c88a519ab73c467e9cb83f5" - integrity sha512-dFcTLQi6BZ+aFUaICg7er+/usEoqFdQxiEBsEMNGoipenihtxxtdrQuBXvyANCEI8VuUIVYFgeHGx9sLLvim4w== - dependencies: - "@types/normalize-package-data" "^2.4.0" - normalize-package-data "^2.5.0" - parse-json "^4.0.0" - type-fest "^0.4.1" - -"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6: - version "2.3.6" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" - integrity sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw== - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~2.0.0" - safe-buffer "~5.1.1" - string_decoder "~1.1.1" - util-deprecate "~1.0.1" - -readable-stream@^3.0.6, readable-stream@^3.1.1: - version "3.3.0" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.3.0.tgz#cb8011aad002eb717bf040291feba8569c986fb9" - integrity sha512-EsI+s3k3XsW+fU8fQACLN59ky34AZ14LoeVZpYwmZvldCFo0r0gnelwF2TcMjLor/BTL5aDJVBMkss0dthToPw== - dependencies: - inherits "^2.0.3" - string_decoder "^1.1.1" - util-deprecate "^1.0.1" - -readdirp@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" - integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== - dependencies: - graceful-fs "^4.1.11" - micromatch "^3.1.10" - readable-stream "^2.0.2" - -regenerate-unicode-properties@^8.0.2: - version "8.1.0" - resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz#ef51e0f0ea4ad424b77bf7cb41f3e015c70a3f0e" - integrity sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA== - dependencies: - regenerate "^1.4.0" - -regenerate@^1.2.1, regenerate@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.0.tgz#4a856ec4b56e4077c557589cae85e7a4c8869a11" - integrity sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg== - -regenerator-runtime@^0.13.2: - version "0.13.2" - resolved 
"https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.2.tgz#32e59c9a6fb9b1a4aff09b4930ca2d4477343447" - integrity sha512-S/TQAZJO+D3m9xeN1WTI8dLKBBiRgXBlTJvbWjCThHWZj9EvHK70Ff50/tYj2J/fvBY6JtFVwRuazHN2E7M9BA== - -regenerator-transform@^0.14.0: - version "0.14.0" - resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.14.0.tgz#2ca9aaf7a2c239dd32e4761218425b8c7a86ecaf" - integrity sha512-rtOelq4Cawlbmq9xuMR5gdFmv7ku/sFoB7sRiywx7aq53bc52b4j6zvH7Te1Vt/X2YveDKnCGUbioieU7FEL3w== - dependencies: - private "^0.1.6" - -regex-not@^1.0.0, regex-not@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" - integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== - dependencies: - extend-shallow "^3.0.2" - safe-regex "^1.1.0" - -regexp-tree@^0.1.6: - version "0.1.10" - resolved "https://registry.yarnpkg.com/regexp-tree/-/regexp-tree-0.1.10.tgz#d837816a039c7af8a8d64d7a7c3cf6a1d93450bc" - integrity sha512-K1qVSbcedffwuIslMwpe6vGlj+ZXRnGkvjAtFHfDZZZuEdA/h0dxljAPu9vhUo6Rrx2U2AwJ+nSQ6hK+lrP5MQ== - -regexpp@^1.0.1: - version "1.1.0" - resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-1.1.0.tgz#0e3516dd0b7904f413d2d4193dce4618c3a689ab" - integrity sha512-LOPw8FpgdQF9etWMaAfG/WRthIdXJGYp4mJ2Jgn/2lpkbod9jPn0t9UqN7AxBOKNfzRbYyVfgc7Vk4t/MpnXgw== - -regexpu-core@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-1.0.0.tgz#86a763f58ee4d7c2f6b102e4764050de7ed90c6b" - integrity sha1-hqdj9Y7k18L2sQLkdkBQ3n7ZDGs= - dependencies: - regenerate "^1.2.1" - regjsgen "^0.2.0" - regjsparser "^0.1.4" - -regexpu-core@^4.5.4: - version "4.5.4" - resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.5.4.tgz#080d9d02289aa87fe1667a4f5136bc98a6aebaae" - integrity sha512-BtizvGtFQKGPUcTy56o3nk1bGRp4SZOTYrDtGNlqCQufptV5IkkLN6Emw+yunAJjzf+C9FQFtvq7IoA3+oMYHQ== - dependencies: - regenerate "^1.4.0" - regenerate-unicode-properties "^8.0.2" - regjsgen "^0.5.0" - regjsparser "^0.6.0" - unicode-match-property-ecmascript "^1.0.4" - unicode-match-property-value-ecmascript "^1.1.0" - -regjsgen@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.2.0.tgz#6c016adeac554f75823fe37ac05b92d5a4edb1f7" - integrity sha1-bAFq3qxVT3WCP+N6wFuS1aTtsfc= - -regjsgen@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.0.tgz#a7634dc08f89209c2049adda3525711fb97265dd" - integrity sha512-RnIrLhrXCX5ow/E5/Mh2O4e/oa1/jW0eaBKTSy3LaCj+M3Bqvm97GWDp2yUtzIs4LEn65zR2yiYGFqb2ApnzDA== - -regjsparser@^0.1.4: - version "0.1.5" - resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.1.5.tgz#7ee8f84dc6fa792d3fd0ae228d24bd949ead205c" - integrity sha1-fuj4Tcb6eS0/0K4ijSS9lJ6tIFw= - dependencies: - jsesc "~0.5.0" - -regjsparser@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.0.tgz#f1e6ae8b7da2bae96c99399b868cd6c933a2ba9c" - integrity sha512-RQ7YyokLiQBomUJuUG8iGVvkgOLxwyZM8k6d3q5SAXpg4r5TZJZigKFvC6PpD+qQ98bCDC5YelPeA3EucDoNeQ== - dependencies: - jsesc "~0.5.0" - -relateurl@0.2.x: - version "0.2.7" - resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" - integrity sha1-VNvzd+UUQKypCkzSdGANP/LYiKk= - -remedial@1.x: - version "1.0.8" - resolved 
"https://registry.yarnpkg.com/remedial/-/remedial-1.0.8.tgz#a5e4fd52a0e4956adbaf62da63a5a46a78c578a0" - integrity sha512-/62tYiOe6DzS5BqVsNpH/nkGlX45C/Sp6V+NtiN6JQNS1Viay7cWkazmRkrQrdFj2eshDe96SIQNIoMxqhzBOg== - -remove-trailing-separator@^1.0.1: - version "1.1.0" - resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" - integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= - -renderkid@^2.0.1: - version "2.0.3" - resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-2.0.3.tgz#380179c2ff5ae1365c522bf2fcfcff01c5b74149" - integrity sha512-z8CLQp7EZBPCwCnncgf9C4XAi3WR0dv+uWu/PjIyhhAb5d6IJ/QZqlHFprHeKT+59//V6BNUsLbvN8+2LarxGA== - dependencies: - css-select "^1.1.0" - dom-converter "^0.2" - htmlparser2 "^3.3.0" - strip-ansi "^3.0.0" - utila "^0.4.0" - -repeat-element@^1.1.2: - version "1.1.3" - resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce" - integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== - -repeat-string@^1.6.1: - version "1.6.1" - resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" - integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= - -request-promise-core@1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/request-promise-core/-/request-promise-core-1.1.2.tgz#339f6aababcafdb31c799ff158700336301d3346" - integrity sha512-UHYyq1MO8GsefGEt7EprS8UrXsm1TxEvFUX1IMTuSLU2Rh7fTIdFtl8xD7JiEYiWU2dl+NYAjCTksTehQUxPag== - dependencies: - lodash "^4.17.11" - -request-promise-native@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/request-promise-native/-/request-promise-native-1.0.7.tgz#a49868a624bdea5069f1251d0a836e0d89aa2c59" - integrity sha512-rIMnbBdgNViL37nZ1b3L/VfPOpSi0TqVDQPAvO6U14lMzOLrt5nilxCQqtDKhZeDiW0/hkCXGoQjhgJd/tCh6w== - dependencies: - request-promise-core "1.1.2" - stealthy-require "^1.1.1" - tough-cookie "^2.3.3" - -request@^2.87.0: - version "2.88.0" - resolved "https://registry.yarnpkg.com/request/-/request-2.88.0.tgz#9c2fca4f7d35b592efe57c7f0a55e81052124fef" - integrity sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg== - dependencies: - aws-sign2 "~0.7.0" - aws4 "^1.8.0" - caseless "~0.12.0" - combined-stream "~1.0.6" - extend "~3.0.2" - forever-agent "~0.6.1" - form-data "~2.3.2" - har-validator "~5.1.0" - http-signature "~1.2.0" - is-typedarray "~1.0.0" - isstream "~0.1.2" - json-stringify-safe "~5.0.1" - mime-types "~2.1.19" - oauth-sign "~0.9.0" - performance-now "^2.1.0" - qs "~6.5.2" - safe-buffer "^5.1.2" - tough-cookie "~2.4.3" - tunnel-agent "^0.6.0" - uuid "^3.3.2" - -require-directory@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" - integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= - -require-from-string@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" - integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== - -require-main-filename@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" - integrity sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE= - 
-require-main-filename@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" - integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== - -require-uncached@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/require-uncached/-/require-uncached-1.0.3.tgz#4e0d56d6c9662fd31e43011c4b95aa49955421d3" - integrity sha1-Tg1W1slmL9MeQwEcS5WqSZVUIdM= - dependencies: - caller-path "^0.1.0" - resolve-from "^1.0.0" - -requires-port@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" - integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8= - -reselect@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/reselect/-/reselect-3.0.1.tgz#efdaa98ea7451324d092b2b2163a6a1d7a9a2147" - integrity sha1-79qpjqdFEyTQkrKyFjpqHXqaIUc= - -resolve-cwd@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a" - integrity sha1-AKn3OHVW4nA46uIyyqNypqWbZlo= - dependencies: - resolve-from "^3.0.0" - -resolve-from@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-1.0.1.tgz#26cbfe935d1aeeeabb29bc3fe5aeb01e93d44226" - integrity sha1-Jsv+k10a7uq7Kbw/5a6wHpPUQiY= - -resolve-from@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" - integrity sha1-six699nWiBvItuZTM17rywoYh0g= - -resolve-url@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" - integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= - -resolve@^1.10.0, resolve@^1.3.2, resolve@^1.4.0, resolve@^1.8.1: - version "1.11.0" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.11.0.tgz#4014870ba296176b86343d50b60f3b50609ce232" - integrity sha512-WL2pBDjqT6pGUNSUzMw00o4T7If+z4H2x3Gz893WoUQ5KW8Vr9txp00ykiP16VBaZF5+j/OcXJHZ9+PCvdiDKw== - dependencies: - path-parse "^1.0.6" - -responselike@1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7" - integrity sha1-kYcg7ztjHFZCvgaPFa3lpG9Loec= - dependencies: - lowercase-keys "^1.0.0" - -restore-cursor@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" - integrity sha1-n37ih/gv0ybU/RYpI9YhKe7g368= - dependencies: - onetime "^2.0.0" - signal-exit "^3.0.2" - -ret@~0.1.10: - version "0.1.15" - resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" - integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== - -rgb-regex@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/rgb-regex/-/rgb-regex-1.0.1.tgz#c0e0d6882df0e23be254a475e8edd41915feaeb1" - integrity sha1-wODWiC3w4jviVKR16O3UGRX+rrE= - -rgba-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/rgba-regex/-/rgba-regex-1.0.0.tgz#43374e2e2ca0968b0ef1523460b7d730ff22eeb3" - integrity sha1-QzdOLiyglosO8VI0YLfXMP8i7rM= - -rimraf@^2.5.4, rimraf@^2.6.1, rimraf@^2.6.2, rimraf@^2.6.3, rimraf@~2.6.2: - version "2.6.3" - resolved 
"https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab" - integrity sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA== - dependencies: - glob "^7.1.3" - -ripemd160@^2.0.0, ripemd160@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" - integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== - dependencies: - hash-base "^3.0.0" - inherits "^2.0.1" - -run-async@^2.2.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.3.0.tgz#0371ab4ae0bdd720d4166d7dfda64ff7a445a6c0" - integrity sha1-A3GrSuC91yDUFm19/aZP96RFpsA= - dependencies: - is-promise "^2.1.0" - -run-queue@^1.0.0, run-queue@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/run-queue/-/run-queue-1.0.3.tgz#e848396f057d223f24386924618e25694161ec47" - integrity sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec= - dependencies: - aproba "^1.1.1" - -rx-lite-aggregates@^4.0.8: - version "4.0.8" - resolved "https://registry.yarnpkg.com/rx-lite-aggregates/-/rx-lite-aggregates-4.0.8.tgz#753b87a89a11c95467c4ac1626c4efc4e05c67be" - integrity sha1-dTuHqJoRyVRnxKwWJsTvxOBcZ74= - dependencies: - rx-lite "*" - -rx-lite@*, rx-lite@^4.0.8: - version "4.0.8" - resolved "https://registry.yarnpkg.com/rx-lite/-/rx-lite-4.0.8.tgz#0b1e11af8bc44836f04a6407e92da42467b79444" - integrity sha1-Cx4Rr4vESDbwSmQH6S2kJGe3lEQ= - -safe-buffer@5.1.2, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: - version "5.1.2" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" - integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== - -safe-regex@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" - integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= - dependencies: - ret "~0.1.10" - -"safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: - version "2.1.2" - resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" - integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== - -sax@^1.2.4, sax@~1.2.4: - version "1.2.4" - resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" - integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== - -schema-utils@^0.4.4: - version "0.4.7" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-0.4.7.tgz#ba74f597d2be2ea880131746ee17d0a093c68187" - integrity sha512-v/iwU6wvwGK8HbU9yi3/nhGzP0yGSuhQMzL6ySiec1FSrZZDkhm4noOSWzrNFo/jEc+SJY6jRTwuwbSXJPDUnQ== - dependencies: - ajv "^6.1.0" - ajv-keywords "^3.1.0" - -schema-utils@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-1.0.0.tgz#0b79a93204d7b600d4b2850d1f66c2a34951c770" - integrity sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g== - dependencies: - ajv "^6.1.0" - ajv-errors "^1.0.0" - ajv-keywords "^3.1.0" - -select-hose@^2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" - integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= - -select@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/select/-/select-1.1.2.tgz#0e7350acdec80b1108528786ec1d4418d11b396d" - integrity sha1-DnNQrN7ICxEIUoeG7B1EGNEbOW0= - -selfsigned@^1.10.4: - version "1.10.4" - resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.4.tgz#cdd7eccfca4ed7635d47a08bf2d5d3074092e2cd" - integrity sha512-9AukTiDmHXGXWtWjembZ5NDmVvP2695EtpgbCsxCa68w3c88B+alqbmZ4O3hZ4VWGXeGWzEVdvqgAJD8DQPCDw== - dependencies: - node-forge "0.7.5" - -"semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0: - version "5.7.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.0.tgz#790a7cf6fea5459bac96110b29b60412dc8ff96b" - integrity sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA== - -semver@^6.0.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-6.1.0.tgz#e95dc415d45ecf03f2f9f83b264a6b11f49c0cca" - integrity sha512-kCqEOOHoBcFs/2Ccuk4Xarm/KiWRSLEX9CAZF8xkJ6ZPlIoTZ8V5f7J16vYLJqDbR7KrxTJpR2lqjIEm2Qx9cQ== - -send@0.17.1: - version "0.17.1" - resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8" - integrity sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg== - dependencies: - debug "2.6.9" - depd "~1.1.2" - destroy "~1.0.4" - encodeurl "~1.0.2" - escape-html "~1.0.3" - etag "~1.8.1" - fresh "0.5.2" - http-errors "~1.7.2" - mime "1.6.0" - ms "2.1.1" - on-finished "~2.3.0" - range-parser "~1.2.1" - statuses "~1.5.0" - -serialize-javascript@^1.4.0, serialize-javascript@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-1.7.0.tgz#d6e0dfb2a3832a8c94468e6eb1db97e55a192a65" - integrity sha512-ke8UG8ulpFOxO8f8gRYabHQe/ZntKlcig2Mp+8+URDP1D8vJZ0KUt7LYo07q25Z/+JVSgpr/cui9PIp5H6/+nA== - -serve-index@^1.9.1: - version "1.9.1" - resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" - integrity sha1-03aNabHn2C5c4FD/9bRTvqEqkjk= - dependencies: - accepts "~1.3.4" - batch "0.6.1" - debug "2.6.9" - escape-html "~1.0.3" - http-errors "~1.6.2" - mime-types "~2.1.17" - parseurl "~1.3.2" - -serve-static@1.14.1: - version "1.14.1" - resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.1.tgz#666e636dc4f010f7ef29970a88a674320898b2f9" - integrity sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg== - dependencies: - encodeurl "~1.0.2" - escape-html "~1.0.3" - parseurl "~1.3.3" - send "0.17.1" - -set-blocking@^2.0.0, set-blocking@~2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" - integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= - -set-value@^0.4.3: - version "0.4.3" - resolved "https://registry.yarnpkg.com/set-value/-/set-value-0.4.3.tgz#7db08f9d3d22dc7f78e53af3c3bf4666ecdfccf1" - integrity sha1-fbCPnT0i3H945Trzw79GZuzfzPE= - dependencies: - extend-shallow "^2.0.1" - is-extendable "^0.1.1" - is-plain-object "^2.0.1" - to-object-path "^0.3.0" - -set-value@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.0.tgz#71ae4a88f0feefbbf52d1ea604f3fb315ebb6274" - integrity 
sha512-hw0yxk9GT/Hr5yJEYnHNKYXkIA8mVJgd9ditYZCe16ZczcaELYYcfvaXesNACk2O8O0nTiPQcQhGUQj8JLzeeg== - dependencies: - extend-shallow "^2.0.1" - is-extendable "^0.1.1" - is-plain-object "^2.0.3" - split-string "^3.0.1" - -setimmediate@^1.0.4: - version "1.0.5" - resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" - integrity sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU= - -setprototypeof@1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" - integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== - -setprototypeof@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683" - integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw== - -sha.js@^2.4.0, sha.js@^2.4.8: - version "2.4.11" - resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" - integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== - dependencies: - inherits "^2.0.1" - safe-buffer "^5.0.1" - -shebang-command@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" - integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= - dependencies: - shebang-regex "^1.0.0" - -shebang-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" - integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= - -shell-quote@^1.6.1: - version "1.6.1" - resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.6.1.tgz#f4781949cce402697127430ea3b3c5476f481767" - integrity sha1-9HgZSczkAmlxJ0MOo7PFR29IF2c= - dependencies: - array-filter "~0.0.0" - array-map "~0.0.0" - array-reduce "~0.0.0" - jsonify "~0.0.0" - -signal-exit@^3.0.0, signal-exit@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" - integrity sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0= - -simple-swizzle@^0.2.2: - version "0.2.2" - resolved "https://registry.yarnpkg.com/simple-swizzle/-/simple-swizzle-0.2.2.tgz#a4da6b635ffcccca33f70d17cb92592de95e557a" - integrity sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo= - dependencies: - is-arrayish "^0.3.1" - -slash@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-1.0.0.tgz#c41f2f6c39fc16d1cd17ad4b5d896114ae470d55" - integrity sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU= - -slash@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-2.0.0.tgz#de552851a1759df3a8f206535442f5ec4ddeab44" - integrity sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A== - -slice-ansi@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-1.0.0.tgz#044f1a49d8842ff307aad6b505ed178bd950134d" - integrity sha512-POqxBK6Lb3q6s047D/XsDVNPnF9Dl8JSaqe9h9lURl0OdNqy/ujDrOiIHtsqXMGbWWTIomRzAMaTyawAU//Reg== - dependencies: - is-fullwidth-code-point "^2.0.0" - -snapdragon-node@^2.0.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" - integrity 
sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== - dependencies: - define-property "^1.0.0" - isobject "^3.0.0" - snapdragon-util "^3.0.1" - -snapdragon-util@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" - integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== - dependencies: - kind-of "^3.2.0" - -snapdragon@^0.8.1: - version "0.8.2" - resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" - integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== - dependencies: - base "^0.11.1" - debug "^2.2.0" - define-property "^0.2.5" - extend-shallow "^2.0.1" - map-cache "^0.2.2" - source-map "^0.5.6" - source-map-resolve "^0.5.0" - use "^3.1.0" - -sockjs-client@1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/sockjs-client/-/sockjs-client-1.3.0.tgz#12fc9d6cb663da5739d3dc5fb6e8687da95cb177" - integrity sha512-R9jxEzhnnrdxLCNln0xg5uGHqMnkhPSTzUZH2eXcR03S/On9Yvoq2wyUZILRUhZCNVu2PmwWVoyuiPz8th8zbg== - dependencies: - debug "^3.2.5" - eventsource "^1.0.7" - faye-websocket "~0.11.1" - inherits "^2.0.3" - json3 "^3.3.2" - url-parse "^1.4.3" - -sockjs@0.3.19: - version "0.3.19" - resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.19.tgz#d976bbe800af7bd20ae08598d582393508993c0d" - integrity sha512-V48klKZl8T6MzatbLlzzRNhMepEys9Y4oGFpypBFFn1gLI/QQ9HtLLyWJNbPlwGLelOVOEijUbTTJeLLI59jLw== - dependencies: - faye-websocket "^0.10.0" - uuid "^3.0.1" - -sort-keys@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-2.0.0.tgz#658535584861ec97d730d6cf41822e1f56684128" - integrity sha1-ZYU1WEhh7JfXMNbPQYIuH1ZoQSg= - dependencies: - is-plain-obj "^1.0.0" - -source-list-map@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" - integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== - -source-map-resolve@^0.5.0: - version "0.5.2" - resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.2.tgz#72e2cc34095543e43b2c62b2c4c10d4a9054f259" - integrity sha512-MjqsvNwyz1s0k81Goz/9vRBe9SZdB09Bdw+/zYyO+3CuPk6fouTaxscHkgtE8jKvf01kVfl8riHzERQ/kefaSA== - dependencies: - atob "^2.1.1" - decode-uri-component "^0.2.0" - resolve-url "^0.2.1" - source-map-url "^0.4.0" - urix "^0.1.0" - -source-map-support@~0.5.10: - version "0.5.12" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.12.tgz#b4f3b10d51857a5af0138d3ce8003b201613d599" - integrity sha512-4h2Pbvyy15EE02G+JOZpUCmqWJuqrs+sEkzewTm++BPi7Hvn/HwcqLAcNxYAyI0x13CpPPn+kMjl+hplXMHITQ== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - -source-map-url@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" - integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM= - -source-map@^0.5.0, source-map@^0.5.3, source-map@^0.5.6: - version "0.5.7" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" - integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= - -source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: - version "0.6.1" - resolved 
"https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" - integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== - -spdx-correct@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.0.tgz#fb83e504445268f154b074e218c87c003cd31df4" - integrity sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q== - dependencies: - spdx-expression-parse "^3.0.0" - spdx-license-ids "^3.0.0" - -spdx-exceptions@^2.1.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz#2ea450aee74f2a89bfb94519c07fcd6f41322977" - integrity sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA== - -spdx-expression-parse@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz#99e119b7a5da00e05491c9fa338b7904823b41d0" - integrity sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg== - dependencies: - spdx-exceptions "^2.1.0" - spdx-license-ids "^3.0.0" - -spdx-license-ids@^3.0.0: - version "3.0.4" - resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.4.tgz#75ecd1a88de8c184ef015eafb51b5b48bfd11bb1" - integrity sha512-7j8LYJLeY/Yb6ACbQ7F76qy5jHkp0U6jgBfJsk97bwWlVUnUWsAgpyaCvo17h0/RQGnQ036tVDomiwoI4pDkQA== - -spdy-transport@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" - integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== - dependencies: - debug "^4.1.0" - detect-node "^2.0.4" - hpack.js "^2.1.6" - obuf "^1.1.2" - readable-stream "^3.0.6" - wbuf "^1.7.3" - -spdy@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/spdy/-/spdy-4.0.0.tgz#81f222b5a743a329aa12cea6a390e60e9b613c52" - integrity sha512-ot0oEGT/PGUpzf/6uk4AWLqkq+irlqHXkrdbk51oWONh3bxQmBuljxPNl66zlRRcIJStWq0QkLUCPOPjgjvU0Q== - dependencies: - debug "^4.1.0" - handle-thing "^2.0.0" - http-deceiver "^1.2.7" - select-hose "^2.0.0" - spdy-transport "^3.0.0" - -split-string@^3.0.1, split-string@^3.0.2: - version "3.1.0" - resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" - integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== - dependencies: - extend-shallow "^3.0.0" - -sprintf-js@~1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" - integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= - -sshpk@^1.7.0: - version "1.16.1" - resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877" - integrity sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== - dependencies: - asn1 "~0.2.3" - assert-plus "^1.0.0" - bcrypt-pbkdf "^1.0.0" - dashdash "^1.12.0" - ecc-jsbn "~0.1.1" - getpass "^0.1.1" - jsbn "~0.1.0" - safer-buffer "^2.0.2" - tweetnacl "~0.14.0" - -ssri@^5.2.4: - version "5.3.0" - resolved "https://registry.yarnpkg.com/ssri/-/ssri-5.3.0.tgz#ba3872c9c6d33a0704a7d71ff045e5ec48999d06" - integrity sha512-XRSIPqLij52MtgoQavH/x/dU1qVKtWUAAZeOHsR9c2Ddi4XerFy3mc1alf+dLJKl9EUIm/Ht+EowFkTUOA6GAQ== - dependencies: - safe-buffer 
"^5.1.1" - -ssri@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/ssri/-/ssri-6.0.1.tgz#2a3c41b28dd45b62b63676ecb74001265ae9edd8" - integrity sha512-3Wge10hNcT1Kur4PDFwEieXSCMCJs/7WvSACcrMYrNp+b8kDL1/0wJch5Ni2WrtwEa2IO8OsVfeKIciKCDx/QA== - dependencies: - figgy-pudding "^3.5.1" - -stable@^0.1.8: - version "0.1.8" - resolved "https://registry.yarnpkg.com/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" - integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== - -stackframe@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/stackframe/-/stackframe-1.0.4.tgz#357b24a992f9427cba6b545d96a14ed2cbca187b" - integrity sha512-to7oADIniaYwS3MhtCa/sQhrxidCCQiF/qp4/m5iN3ipf0Y7Xlri0f6eG29r08aL7JYl8n32AF3Q5GYBZ7K8vw== - -static-extend@^0.1.1: - version "0.1.2" - resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" - integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= - dependencies: - define-property "^0.2.5" - object-copy "^0.1.0" - -"statuses@>= 1.4.0 < 2", "statuses@>= 1.5.0 < 2", statuses@~1.5.0: - version "1.5.0" - resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" - integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= - -stealthy-require@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/stealthy-require/-/stealthy-require-1.1.1.tgz#35b09875b4ff49f26a777e509b3090a3226bf24b" - integrity sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks= - -stream-browserify@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b" - integrity sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg== - dependencies: - inherits "~2.0.1" - readable-stream "^2.0.2" - -stream-each@^1.1.0: - version "1.2.3" - resolved "https://registry.yarnpkg.com/stream-each/-/stream-each-1.2.3.tgz#ebe27a0c389b04fbcc233642952e10731afa9bae" - integrity sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw== - dependencies: - end-of-stream "^1.1.0" - stream-shift "^1.0.0" - -stream-http@^2.7.2: - version "2.8.3" - resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.3.tgz#b2d242469288a5a27ec4fe8933acf623de6514fc" - integrity sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw== - dependencies: - builtin-status-codes "^3.0.0" - inherits "^2.0.1" - readable-stream "^2.3.6" - to-arraybuffer "^1.0.0" - xtend "^4.0.0" - -stream-shift@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.0.tgz#d5c752825e5367e786f78e18e445ea223a155952" - integrity sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI= - -strict-uri-encode@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" - integrity sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM= - -string-width@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" - integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= - dependencies: - code-point-at "^1.0.0" - is-fullwidth-code-point "^1.0.0" - strip-ansi "^3.0.0" - -"string-width@^1.0.2 || 2", string-width@^2.0.0, string-width@^2.1.0, string-width@^2.1.1: - version "2.1.1" - resolved 
"https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" - integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== - dependencies: - is-fullwidth-code-point "^2.0.0" - strip-ansi "^4.0.0" - -string-width@^3.0.0, string-width@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" - integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== - dependencies: - emoji-regex "^7.0.1" - is-fullwidth-code-point "^2.0.0" - strip-ansi "^5.1.0" - -string.prototype.padend@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/string.prototype.padend/-/string.prototype.padend-3.0.0.tgz#f3aaef7c1719f170c5eab1c32bf780d96e21f2f0" - integrity sha1-86rvfBcZ8XDF6rHDK/eA2W4h8vA= - dependencies: - define-properties "^1.1.2" - es-abstract "^1.4.3" - function-bind "^1.0.2" - -string.prototype.padstart@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/string.prototype.padstart/-/string.prototype.padstart-3.0.0.tgz#5bcfad39f4649bb2d031292e19bcf0b510d4b242" - integrity sha1-W8+tOfRkm7LQMSkuGbzwtRDUskI= - dependencies: - define-properties "^1.1.2" - es-abstract "^1.4.3" - function-bind "^1.0.2" - -string_decoder@^1.0.0, string_decoder@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.2.0.tgz#fe86e738b19544afe70469243b2a1ee9240eae8d" - integrity sha512-6YqyX6ZWEYguAxgZzHGL7SsCeGx3V2TtOTqZz1xSTSWnqsbWwbptafNyvf/ACquZUXV3DANr5BDIwNYe1mN42w== - dependencies: - safe-buffer "~5.1.0" - -string_decoder@~1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" - integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== - dependencies: - safe-buffer "~5.1.0" - -strip-ansi@^3.0.0, strip-ansi@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" - integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= - dependencies: - ansi-regex "^2.0.0" - -strip-ansi@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" - integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= - dependencies: - ansi-regex "^3.0.0" - -strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" - integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== - dependencies: - ansi-regex "^4.1.0" - -strip-eof@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" - integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= - -strip-indent@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-2.0.0.tgz#5ef8db295d01e6ed6cbf7aab96998d7822527b68" - integrity sha1-XvjbKV0B5u1sv3qrlpmNeCJSe2g= - -strip-json-comments@~2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" - integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= - -stylehacks@^4.0.0: - version "4.0.3" - resolved 
"https://registry.yarnpkg.com/stylehacks/-/stylehacks-4.0.3.tgz#6718fcaf4d1e07d8a1318690881e8d96726a71d5" - integrity sha512-7GlLk9JwlElY4Y6a/rmbH2MhVlTyVmiJd1PfTCqFaIBEGMYNsrO/v3SeGTdhBThLg4Z+NbOk/qFMwCa+J+3p/g== - dependencies: - browserslist "^4.0.0" - postcss "^7.0.0" - postcss-selector-parser "^3.0.0" - -supports-color@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" - integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= - -supports-color@^5.3.0, supports-color@^5.4.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" - integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== - dependencies: - has-flag "^3.0.0" - -supports-color@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" - integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== - dependencies: - has-flag "^3.0.0" - -svg-tags@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/svg-tags/-/svg-tags-1.0.0.tgz#58f71cee3bd519b59d4b2a843b6c7de64ac04764" - integrity sha1-WPcc7jvVGbWdSyqEO2x95krAR2Q= - -svgo@^1.0.0: - version "1.2.2" - resolved "https://registry.yarnpkg.com/svgo/-/svgo-1.2.2.tgz#0253d34eccf2aed4ad4f283e11ee75198f9d7316" - integrity sha512-rAfulcwp2D9jjdGu+0CuqlrAUin6bBWrpoqXWwKDZZZJfXcUXQSxLJOFJCQCSA0x0pP2U0TxSlJu2ROq5Bq6qA== - dependencies: - chalk "^2.4.1" - coa "^2.0.2" - css-select "^2.0.0" - css-select-base-adapter "^0.1.1" - css-tree "1.0.0-alpha.28" - css-url-regex "^1.1.0" - csso "^3.5.1" - js-yaml "^3.13.1" - mkdirp "~0.5.1" - object.values "^1.1.0" - sax "~1.2.4" - stable "^0.1.8" - unquote "~1.1.1" - util.promisify "~1.0.0" - -table@4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/table/-/table-4.0.2.tgz#a33447375391e766ad34d3486e6e2aedc84d2e36" - integrity sha512-UUkEAPdSGxtRpiV9ozJ5cMTtYiqz7Ni1OGqLXRCynrvzdtR1p+cfOWe2RJLwvUG8hNanaSRjecIqwOjqeatDsA== - dependencies: - ajv "^5.2.3" - ajv-keywords "^2.1.0" - chalk "^2.1.0" - lodash "^4.17.4" - slice-ansi "1.0.0" - string-width "^2.1.1" - -tapable@^1.0.0, tapable@^1.1.0: - version "1.1.3" - resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" - integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== - -tar@^4: - version "4.4.8" - resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.8.tgz#b19eec3fde2a96e64666df9fdb40c5ca1bc3747d" - integrity sha512-LzHF64s5chPQQS0IYBn9IN5h3i98c12bo4NCO7e0sGM2llXQ3p2FGC5sdENN4cTW48O915Sh+x+EXx7XW96xYQ== - dependencies: - chownr "^1.1.1" - fs-minipass "^1.2.5" - minipass "^2.3.4" - minizlib "^1.1.1" - mkdirp "^0.5.0" - safe-buffer "^5.1.2" - yallist "^3.0.2" - -terser-webpack-plugin@^1.1.0, terser-webpack-plugin@^1.2.3: - version "1.2.4" - resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.2.4.tgz#56f87540c28dd5265753431009388f473b5abba3" - integrity sha512-64IiILNQlACWZLzFlpzNaG0bpQ4ytaB7fwOsbpsdIV70AfLUmIGGeuKL0YV2WmtcrURjE2aOvHD4/lrFV3Rg+Q== - dependencies: - cacache "^11.3.2" - find-cache-dir "^2.0.0" - is-wsl "^1.1.0" - schema-utils "^1.0.0" - serialize-javascript "^1.7.0" - source-map "^0.6.1" - terser "^3.17.0" - webpack-sources "^1.3.0" - worker-farm "^1.7.0" - 
-terser@^3.17.0: - version "3.17.0" - resolved "https://registry.yarnpkg.com/terser/-/terser-3.17.0.tgz#f88ffbeda0deb5637f9d24b0da66f4e15ab10cb2" - integrity sha512-/FQzzPJmCpjAH9Xvk2paiWrFq+5M6aVOf+2KRbwhByISDX/EujxsK+BAvrhb6H+2rtrLCHK9N01wO014vrIwVQ== - dependencies: - commander "^2.19.0" - source-map "~0.6.1" - source-map-support "~0.5.10" - -text-table@~0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" - integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= - -thenify-all@^1.0.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726" - integrity sha1-GhkY1ALY/D+Y+/I02wvMjMEOlyY= - dependencies: - thenify ">= 3.1.0 < 4" - -"thenify@>= 3.1.0 < 4": - version "3.3.0" - resolved "https://registry.yarnpkg.com/thenify/-/thenify-3.3.0.tgz#e69e38a1babe969b0108207978b9f62b88604839" - integrity sha1-5p44obq+lpsBCCB5eLn2K4hgSDk= - dependencies: - any-promise "^1.0.0" - -thread-loader@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/thread-loader/-/thread-loader-2.1.2.tgz#f585dd38e852c7f9cded5d092992108148f5eb30" - integrity sha512-7xpuc9Ifg6WU+QYw/8uUqNdRwMD+N5gjwHKMqETrs96Qn+7BHwECpt2Brzr4HFlf4IAkZsayNhmGdbkBsTJ//w== - dependencies: - loader-runner "^2.3.1" - loader-utils "^1.1.0" - neo-async "^2.6.0" - -through2@^2.0.0: - version "2.0.5" - resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" - integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== - dependencies: - readable-stream "~2.3.6" - xtend "~4.0.1" - -through@^2.3.6: - version "2.3.8" - resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" - integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= - -thunky@^1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.0.3.tgz#f5df732453407b09191dae73e2a8cc73f381a826" - integrity sha512-YwT8pjmNcAXBZqrubu22P4FYsh2D4dxRmnWBOL8Jk8bUcRUtc5326kx32tuTmFDAZtLOGEVNl8POAR8j896Iow== - -timed-out@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/timed-out/-/timed-out-4.0.1.tgz#f32eacac5a175bea25d7fab565ab3ed8741ef56f" - integrity sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8= - -timers-browserify@^2.0.4: - version "2.0.10" - resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.10.tgz#1d28e3d2aadf1d5a5996c4e9f95601cd053480ae" - integrity sha512-YvC1SV1XdOUaL6gx5CoGroT3Gu49pK9+TZ38ErPldOWW4j49GI1HKs9DV+KGq/w6y+LZ72W1c8cKz2vzY+qpzg== - dependencies: - setimmediate "^1.0.4" - -timsort@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/timsort/-/timsort-0.3.0.tgz#405411a8e7e6339fe64db9a234de11dc31e02bd4" - integrity sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q= - -tiny-emitter@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/tiny-emitter/-/tiny-emitter-2.1.0.tgz#1d1a56edfc51c43e863cbb5382a72330e3555423" - integrity sha512-NB6Dk1A9xgQPMoGqC5CVXn123gWyte215ONT5Pp5a0yt4nlEoO1ZWeCwpncaekPHXO60i47ihFnZPiRPjRMq4Q== - -tmp@^0.0.33: - version "0.0.33" - resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" - integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== - dependencies: - os-tmpdir "~1.0.2" - -to-arraybuffer@^1.0.0: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43" - integrity sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M= - -to-fast-properties@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" - integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= - -to-object-path@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" - integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= - dependencies: - kind-of "^3.0.2" - -to-regex-range@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" - integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= - dependencies: - is-number "^3.0.0" - repeat-string "^1.6.1" - -to-regex@^3.0.1, to-regex@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" - integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== - dependencies: - define-property "^2.0.2" - extend-shallow "^3.0.2" - regex-not "^1.0.2" - safe-regex "^1.1.0" - -toidentifier@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" - integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== - -topo@3.x.x: - version "3.0.3" - resolved "https://registry.yarnpkg.com/topo/-/topo-3.0.3.tgz#d5a67fb2e69307ebeeb08402ec2a2a6f5f7ad95c" - integrity sha512-IgpPtvD4kjrJ7CRA3ov2FhWQADwv+Tdqbsf1ZnPUSAtCJ9e1Z44MmoSGDXGk4IppoZA7jd/QRkNddlLJWlUZsQ== - dependencies: - hoek "6.x.x" - -toposort@^1.0.0: - version "1.0.7" - resolved "https://registry.yarnpkg.com/toposort/-/toposort-1.0.7.tgz#2e68442d9f64ec720b8cc89e6443ac6caa950029" - integrity sha1-LmhELZ9k7HILjMieZEOsbKqVACk= - -tough-cookie@^2.3.3: - version "2.5.0" - resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2" - integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g== - dependencies: - psl "^1.1.28" - punycode "^2.1.1" - -tough-cookie@~2.4.3: - version "2.4.3" - resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.4.3.tgz#53f36da3f47783b0925afa06ff9f3b165280f781" - integrity sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ== - dependencies: - psl "^1.1.24" - punycode "^1.4.1" - -trim-right@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003" - integrity sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM= - -tryer@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/tryer/-/tryer-1.0.1.tgz#f2c85406800b9b0f74c9f7465b81eaad241252f8" - integrity sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA== - -tslib@^1.9.0: - version "1.9.3" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.3.tgz#d7e4dd79245d85428c4d7e4822a79917954ca286" - integrity sha512-4krF8scpejhaOgqzBEcGM7yDIEfi0/8+8zDRZhNZZ2kjmHJ4hv3zCbQWxoJGz1iw5U0Jl0nma13xzHXcncMavQ== - -tty-browserify@0.0.0: - version "0.0.0" - resolved 
"https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" - integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY= - -tunnel-agent@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" - integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= - dependencies: - safe-buffer "^5.0.1" - -tweetnacl@^0.14.3, tweetnacl@~0.14.0: - version "0.14.5" - resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" - integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= - -type-check@~0.3.2: - version "0.3.2" - resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" - integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= - dependencies: - prelude-ls "~1.1.2" - -type-fest@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.4.1.tgz#8bdf77743385d8a4f13ba95f610f5ccd68c728f8" - integrity sha512-IwzA/LSfD2vC1/YDYMv/zHP4rDF1usCwllsDpbolT3D4fUepIO7f9K70jjmUewU/LmGUKJcwcVtDCpnKk4BPMw== - -type-is@~1.6.17, type-is@~1.6.18: - version "1.6.18" - resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" - integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== - dependencies: - media-typer "0.3.0" - mime-types "~2.1.24" - -typedarray@^0.0.6: - version "0.0.6" - resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" - integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= - -uglify-js@3.4.x: - version "3.4.10" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.4.10.tgz#9ad9563d8eb3acdfb8d38597d2af1d815f6a755f" - integrity sha512-Y2VsbPVs0FIshJztycsO2SfPk7/KAF/T72qzv9u5EpQ4kB2hQoHlhNQTsNyy6ul7lQtqJN/AoWeS23OzEiEFxw== - dependencies: - commander "~2.19.0" - source-map "~0.6.1" - -uikit@^3.1.5: - version "3.1.5" - resolved "https://registry.yarnpkg.com/uikit/-/uikit-3.1.5.tgz#79c68bd1f7be779c1748734bbc281ac531a53129" - integrity sha512-6kN9GewAeFXeOVRbtLL1PIhIXixIDbRMJGHnIiMXJT5Bp3n6HYfVdaBIjJLEh9Ffuv3JdGnwZdKuGpp7oNcRhw== - -unicode-canonical-property-names-ecmascript@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz#2619800c4c825800efdd8343af7dd9933cbe2818" - integrity sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ== - -unicode-match-property-ecmascript@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz#8ed2a32569961bce9227d09cd3ffbb8fed5f020c" - integrity sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg== - dependencies: - unicode-canonical-property-names-ecmascript "^1.0.4" - unicode-property-aliases-ecmascript "^1.0.4" - -unicode-match-property-value-ecmascript@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.1.0.tgz#5b4b426e08d13a80365e0d657ac7a6c1ec46a277" - integrity sha512-hDTHvaBk3RmFzvSl0UVrUmC3PuW9wKVnpoUDYH0JDkSIovzw+J5viQmeYHxVSBptubnr7PbH2e0fnpDRQnQl5g== - -unicode-property-aliases-ecmascript@^1.0.4: - version "1.0.5" - resolved 
"https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.0.5.tgz#a9cc6cc7ce63a0a3023fc99e341b94431d405a57" - integrity sha512-L5RAqCfXqAwR3RriF8pM0lU0w4Ryf/GgzONwi6KnL1taJQa7x1TCxdJnILX59WIGOwR57IVxn7Nej0fz1Ny6fw== - -union-value@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.0.tgz#5c71c34cb5bad5dcebe3ea0cd08207ba5aa1aea4" - integrity sha1-XHHDTLW61dzr4+oM0IIHulqhrqQ= - dependencies: - arr-union "^3.1.0" - get-value "^2.0.6" - is-extendable "^0.1.1" - set-value "^0.4.3" - -uniq@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff" - integrity sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8= - -uniqs@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/uniqs/-/uniqs-2.0.0.tgz#ffede4b36b25290696e6e165d4a59edb998e6b02" - integrity sha1-/+3ks2slKQaW5uFl1KWe25mOawI= - -unique-filename@^1.1.0, unique-filename@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230" - integrity sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ== - dependencies: - unique-slug "^2.0.0" - -unique-slug@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-2.0.1.tgz#5e9edc6d1ce8fb264db18a507ef9bd8544451ca6" - integrity sha512-n9cU6+gITaVu7VGj1Z8feKMmfAjEAQGhwD9fE3zvpRRa0wEIx8ODYkVGfSc94M2OX00tUFV8wH3zYbm1I8mxFg== - dependencies: - imurmurhash "^0.1.4" - -universalify@^0.1.0: - version "0.1.2" - resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" - integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== - -unpipe@1.0.0, unpipe@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" - integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= - -unquote@~1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/unquote/-/unquote-1.1.1.tgz#8fded7324ec6e88a0ff8b905e7c098cdc086d544" - integrity sha1-j97XMk7G6IoP+LkF58CYzcCG1UQ= - -unset-value@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" - integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= - dependencies: - has-value "^0.3.1" - isobject "^3.0.0" - -upath@^1.1.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/upath/-/upath-1.1.2.tgz#3db658600edaeeccbe6db5e684d67ee8c2acd068" - integrity sha512-kXpym8nmDmlCBr7nKdIx8P2jNBa+pBpIUFRnKJ4dr8htyYGJFokkr2ZvERRtUN+9SY+JqXouNgUPtv6JQva/2Q== - -upper-case@^1.1.1: - version "1.1.3" - resolved "https://registry.yarnpkg.com/upper-case/-/upper-case-1.1.3.tgz#f6b4501c2ec4cdd26ba78be7222961de77621598" - integrity sha1-9rRQHC7EzdJrp4vnIilh3ndiFZg= - -uri-js@^4.2.2: - version "4.2.2" - resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.2.2.tgz#94c540e1ff772956e2299507c010aea6c8838eb0" - integrity sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ== - dependencies: - punycode "^2.1.0" - -urix@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" - integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= - -url-loader@^1.1.2: - version "1.1.2" - resolved 
"https://registry.yarnpkg.com/url-loader/-/url-loader-1.1.2.tgz#b971d191b83af693c5e3fea4064be9e1f2d7f8d8" - integrity sha512-dXHkKmw8FhPqu8asTc1puBfe3TehOCo2+RmOOev5suNCIYBcT626kxiWg1NBVkwc4rO8BGa7gP70W7VXuqHrjg== - dependencies: - loader-utils "^1.1.0" - mime "^2.0.3" - schema-utils "^1.0.0" - -url-parse-lax@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c" - integrity sha1-FrXK/Afb42dsGxmZF3gj1lA6yww= - dependencies: - prepend-http "^2.0.0" - -url-parse@^1.4.3: - version "1.4.7" - resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.4.7.tgz#a8a83535e8c00a316e403a5db4ac1b9b853ae278" - integrity sha512-d3uaVyzDB9tQoSXFvuSUNFibTd9zxd2bkVrDRvF5TmvWWQwqE4lgYJ5m+x1DbecWkw+LK4RNl2CU1hHuOKPVlg== - dependencies: - querystringify "^2.1.1" - requires-port "^1.0.0" - -url-to-options@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/url-to-options/-/url-to-options-1.0.1.tgz#1505a03a289a48cbd7a434efbaeec5055f5633a9" - integrity sha1-FQWgOiiaSMvXpDTvuu7FBV9WM6k= - -url@^0.11.0: - version "0.11.0" - resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" - integrity sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE= - dependencies: - punycode "1.3.2" - querystring "0.2.0" - -use@^3.1.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" - integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== - -util-deprecate@^1.0.1, util-deprecate@~1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" - integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= - -util.promisify@1.0.0, util.promisify@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.0.tgz#440f7165a459c9a16dc145eb8e72f35687097030" - integrity sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA== - dependencies: - define-properties "^1.1.2" - object.getownpropertydescriptors "^2.0.3" - -util@0.10.3: - version "0.10.3" - resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9" - integrity sha1-evsa/lCAUkZInj23/g7TeTNqwPk= - dependencies: - inherits "2.0.1" - -util@^0.11.0: - version "0.11.1" - resolved "https://registry.yarnpkg.com/util/-/util-0.11.1.tgz#3236733720ec64bb27f6e26f421aaa2e1b588d61" - integrity sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ== - dependencies: - inherits "2.0.3" - -utila@^0.4.0, utila@~0.4: - version "0.4.0" - resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" - integrity sha1-ihagXURWV6Oupe7MWxKk+lN5dyw= - -utils-merge@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" - integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= - -uuid@^3.0.1, uuid@^3.3.2: - version "3.3.2" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131" - integrity sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA== - -validate-npm-package-license@^3.0.1: - version "3.0.4" - resolved 
"https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" - integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== - dependencies: - spdx-correct "^3.0.0" - spdx-expression-parse "^3.0.0" - -vary@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" - integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= - -vendors@^1.0.0: - version "1.0.3" - resolved "https://registry.yarnpkg.com/vendors/-/vendors-1.0.3.tgz#a6467781abd366217c050f8202e7e50cc9eef8c0" - integrity sha512-fOi47nsJP5Wqefa43kyWSg80qF+Q3XA6MUkgi7Hp1HQaKDQW4cQrK2D0P7mmbFtsV1N89am55Yru/nyEwRubcw== - -verror@1.10.0: - version "1.10.0" - resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" - integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= - dependencies: - assert-plus "^1.0.0" - core-util-is "1.0.2" - extsprintf "^1.2.0" - -vm-browserify@0.0.4: - version "0.0.4" - resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-0.0.4.tgz#5d7ea45bbef9e4a6ff65f95438e0a87c357d5a73" - integrity sha1-XX6kW7755Kb/ZflUOOCofDV9WnM= - dependencies: - indexof "0.0.1" - -vue-eslint-parser@^2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/vue-eslint-parser/-/vue-eslint-parser-2.0.3.tgz#c268c96c6d94cfe3d938a5f7593959b0ca3360d1" - integrity sha512-ZezcU71Owm84xVF6gfurBQUGg8WQ+WZGxgDEQu1IHFBZNx7BFZg3L1yHxrCBNNwbwFtE1GuvfJKMtb6Xuwc/Bw== - dependencies: - debug "^3.1.0" - eslint-scope "^3.7.1" - eslint-visitor-keys "^1.0.0" - espree "^3.5.2" - esquery "^1.0.0" - lodash "^4.17.4" - -vue-hot-reload-api@^2.3.0: - version "2.3.3" - resolved "https://registry.yarnpkg.com/vue-hot-reload-api/-/vue-hot-reload-api-2.3.3.tgz#2756f46cb3258054c5f4723de8ae7e87302a1ccf" - integrity sha512-KmvZVtmM26BQOMK1rwUZsrqxEGeKiYSZGA7SNWE6uExx8UX/cj9hq2MRV/wWC3Cq6AoeDGk57rL9YMFRel/q+g== - -vue-loader@^15.7.0: - version "15.7.0" - resolved "https://registry.yarnpkg.com/vue-loader/-/vue-loader-15.7.0.tgz#27275aa5a3ef4958c5379c006dd1436ad04b25b3" - integrity sha512-x+NZ4RIthQOxcFclEcs8sXGEWqnZHodL2J9Vq+hUz+TDZzBaDIh1j3d9M2IUlTjtrHTZy4uMuRdTi8BGws7jLA== - dependencies: - "@vue/component-compiler-utils" "^2.5.1" - hash-sum "^1.0.2" - loader-utils "^1.1.0" - vue-hot-reload-api "^2.3.0" - vue-style-loader "^4.1.0" - -vue-resource@^1.5.1: - version "1.5.1" - resolved "https://registry.yarnpkg.com/vue-resource/-/vue-resource-1.5.1.tgz#0f3d685e3254d21800bebd966edcf56c34b3b6e4" - integrity sha512-o6V4wNgeqP+9v9b2bPXrr20CGNQPEXjpbUWdZWq9GJhqVeAGcYoeTtn/D4q059ZiyN0DIrDv/ADrQUmlUQcsmg== - dependencies: - got "^8.0.3" - -vue-router@^3.0.6: - version "3.0.6" - resolved "https://registry.yarnpkg.com/vue-router/-/vue-router-3.0.6.tgz#2e4f0f9cbb0b96d0205ab2690cfe588935136ac3" - integrity sha512-Ox0ciFLswtSGRTHYhGvx2L44sVbTPNS+uD2kRISuo8B39Y79rOo0Kw0hzupTmiVtftQYCZl87mwldhh2L9Aquw== - -vue-select@^2.6.4: - version "2.6.4" - resolved "https://registry.yarnpkg.com/vue-select/-/vue-select-2.6.4.tgz#cb82ea3417a4b0f68101f53528ba1c0b04de5308" - integrity sha512-OZIzzGlYwro2QcgAmcn2c4LQDPrYIT3+iaf4WciKPIeFFjgzRti6FC5Qjr2XEPqd85wqtsifuYyxsyb/JoJ4CQ== - -vue-style-loader@^4.1.0: - version "4.1.2" - resolved "https://registry.yarnpkg.com/vue-style-loader/-/vue-style-loader-4.1.2.tgz#dedf349806f25ceb4e64f3ad7c0a44fba735fcf8" - integrity sha512-0ip8ge6Gzz/Bk0iHovU9XAUQaFt/G2B61bnWa2tCcqqdgfHs1lF9xXorFbE55Gmy92okFT+8bfmySuUOu13vxQ== - 
dependencies: - hash-sum "^1.0.2" - loader-utils "^1.0.2" - -vue-template-compiler@^2.6.10: - version "2.6.10" - resolved "https://registry.yarnpkg.com/vue-template-compiler/-/vue-template-compiler-2.6.10.tgz#323b4f3495f04faa3503337a82f5d6507799c9cc" - integrity sha512-jVZkw4/I/HT5ZMvRnhv78okGusqe0+qH2A0Em0Cp8aq78+NK9TII263CDVz2QXZsIT+yyV/gZc/j/vlwa+Epyg== - dependencies: - de-indent "^1.0.2" - he "^1.1.0" - -vue-template-es2015-compiler@^1.9.0: - version "1.9.1" - resolved "https://registry.yarnpkg.com/vue-template-es2015-compiler/-/vue-template-es2015-compiler-1.9.1.tgz#1ee3bc9a16ecbf5118be334bb15f9c46f82f5825" - integrity sha512-4gDntzrifFnCEvyoO8PqyJDmguXgVPxKiIxrBKjIowvL9l+N66196+72XVYR8BBf1Uv1Fgt3bGevJ+sEmxfZzw== - -vue@^2.6.10: - version "2.6.10" - resolved "https://registry.yarnpkg.com/vue/-/vue-2.6.10.tgz#a72b1a42a4d82a721ea438d1b6bf55e66195c637" - integrity sha512-ImThpeNU9HbdZL3utgMCq0oiMzAkt1mcgy3/E6zWC/G6AaQoeuFdsl9nDhTDU3X1R6FK7nsIUuRACVcjI+A2GQ== - -watchpack@^1.5.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.6.0.tgz#4bc12c2ebe8aa277a71f1d3f14d685c7b446cd00" - integrity sha512-i6dHe3EyLjMmDlU1/bGQpEw25XSjkJULPuAVKCbNRefQVq48yXKUpwg538F7AZTf9kyr57zj++pQFltUa5H7yA== - dependencies: - chokidar "^2.0.2" - graceful-fs "^4.1.2" - neo-async "^2.5.0" - -wbuf@^1.1.0, wbuf@^1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" - integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== - dependencies: - minimalistic-assert "^1.0.0" - -wcwidth@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" - integrity sha1-8LDc+RW8X/FSivrbLA4XtTLaL+g= - dependencies: - defaults "^1.0.3" - -webpack-bundle-analyzer@^3.3.0: - version "3.3.2" - resolved "https://registry.yarnpkg.com/webpack-bundle-analyzer/-/webpack-bundle-analyzer-3.3.2.tgz#3da733a900f515914e729fcebcd4c40dde71fc6f" - integrity sha512-7qvJLPKB4rRWZGjVp5U1KEjwutbDHSKboAl0IfafnrdXMrgC0tOtZbQD6Rw0u4cmpgRN4O02Fc0t8eAT+FgGzA== - dependencies: - acorn "^6.0.7" - acorn-walk "^6.1.1" - bfj "^6.1.1" - chalk "^2.4.1" - commander "^2.18.0" - ejs "^2.6.1" - express "^4.16.3" - filesize "^3.6.1" - gzip-size "^5.0.0" - lodash "^4.17.10" - mkdirp "^0.5.1" - opener "^1.5.1" - ws "^6.0.0" - -webpack-chain@^4.11.0: - version "4.12.1" - resolved "https://registry.yarnpkg.com/webpack-chain/-/webpack-chain-4.12.1.tgz#6c8439bbb2ab550952d60e1ea9319141906c02a6" - integrity sha512-BCfKo2YkDe2ByqkEWe1Rw+zko4LsyS75LVr29C6xIrxAg9JHJ4pl8kaIZ396SUSNp6b4815dRZPSTAS8LlURRQ== - dependencies: - deepmerge "^1.5.2" - javascript-stringify "^1.6.0" - -webpack-dev-middleware@^3.7.0: - version "3.7.0" - resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-3.7.0.tgz#ef751d25f4e9a5c8a35da600c5fda3582b5c6cff" - integrity sha512-qvDesR1QZRIAZHOE3iQ4CXLZZSQ1lAUsSpnQmlB1PBfoN/xdRjmge3Dok0W4IdaVLJOGJy3sGI4sZHwjRU0PCA== - dependencies: - memory-fs "^0.4.1" - mime "^2.4.2" - range-parser "^1.2.1" - webpack-log "^2.0.0" - -webpack-dev-server@^3.3.1: - version "3.4.1" - resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-3.4.1.tgz#a5fd8dec95dec410098e7d9a037ff9405395d51a" - integrity sha512-CRqZQX2ryMtrg0r3TXQPpNh76eM1HD3Wmu6zDBxIKi/d2y+4aa28Ia8weNT0bfgWpY6Vs3Oq/K8+DjfbR+tWYw== - dependencies: - ansi-html "0.0.7" - bonjour "^3.5.0" - chokidar "^2.1.6" - compression "^1.7.4" - 
connect-history-api-fallback "^1.6.0" - debug "^4.1.1" - del "^4.1.1" - express "^4.17.0" - html-entities "^1.2.1" - http-proxy-middleware "^0.19.1" - import-local "^2.0.0" - internal-ip "^4.3.0" - ip "^1.1.5" - killable "^1.0.1" - loglevel "^1.6.1" - opn "^5.5.0" - portfinder "^1.0.20" - schema-utils "^1.0.0" - selfsigned "^1.10.4" - semver "^6.0.0" - serve-index "^1.9.1" - sockjs "0.3.19" - sockjs-client "1.3.0" - spdy "^4.0.0" - strip-ansi "^3.0.1" - supports-color "^6.1.0" - url "^0.11.0" - webpack-dev-middleware "^3.7.0" - webpack-log "^2.0.0" - yargs "12.0.5" - -webpack-log@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-2.0.0.tgz#5b7928e0637593f119d32f6227c1e0ac31e1b47f" - integrity sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg== - dependencies: - ansi-colors "^3.0.0" - uuid "^3.3.2" - -webpack-merge@^4.2.1: - version "4.2.1" - resolved "https://registry.yarnpkg.com/webpack-merge/-/webpack-merge-4.2.1.tgz#5e923cf802ea2ace4fd5af1d3247368a633489b4" - integrity sha512-4p8WQyS98bUJcCvFMbdGZyZmsKuWjWVnVHnAS3FFg0HDaRVrPbkivx2RYCre8UiemD67RsiFFLfn4JhLAin8Vw== - dependencies: - lodash "^4.17.5" - -webpack-sources@^1.1.0, webpack-sources@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.3.0.tgz#2a28dcb9f1f45fe960d8f1493252b5ee6530fa85" - integrity sha512-OiVgSrbGu7NEnEvQJJgdSFPl2qWKkWq5lHMhgiToIiN9w34EBnjYzSYs+VbL5KoYiLNtFFa7BZIKxRED3I32pA== - dependencies: - source-list-map "^2.0.0" - source-map "~0.6.1" - -"webpack@>=4 < 4.29": - version "4.28.4" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.28.4.tgz#1ddae6c89887d7efb752adf0c3cd32b9b07eacd0" - integrity sha512-NxjD61WsK/a3JIdwWjtIpimmvE6UrRi3yG54/74Hk9rwNj5FPkA4DJCf1z4ByDWLkvZhTZE+P3C/eh6UD5lDcw== - dependencies: - "@webassemblyjs/ast" "1.7.11" - "@webassemblyjs/helper-module-context" "1.7.11" - "@webassemblyjs/wasm-edit" "1.7.11" - "@webassemblyjs/wasm-parser" "1.7.11" - acorn "^5.6.2" - acorn-dynamic-import "^3.0.0" - ajv "^6.1.0" - ajv-keywords "^3.1.0" - chrome-trace-event "^1.0.0" - enhanced-resolve "^4.1.0" - eslint-scope "^4.0.0" - json-parse-better-errors "^1.0.2" - loader-runner "^2.3.0" - loader-utils "^1.1.0" - memory-fs "~0.4.1" - micromatch "^3.1.8" - mkdirp "~0.5.0" - neo-async "^2.5.0" - node-libs-browser "^2.0.0" - schema-utils "^0.4.4" - tapable "^1.1.0" - terser-webpack-plugin "^1.1.0" - watchpack "^1.5.0" - webpack-sources "^1.3.0" - -websocket-driver@>=0.5.1: - version "0.7.0" - resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.0.tgz#0caf9d2d755d93aee049d4bdd0d3fe2cca2a24eb" - integrity sha1-DK+dLXVdk67gSdS90NP+LMoqJOs= - dependencies: - http-parser-js ">=0.4.0" - websocket-extensions ">=0.1.1" - -websocket-extensions@>=0.1.1: - version "0.1.3" - resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.3.tgz#5d2ff22977003ec687a4b87073dfbbac146ccf29" - integrity sha512-nqHUnMXmBzT0w570r2JpJxfiSD1IzoI+HGVdd3aZ0yNi3ngvQ4jv1dtHt5VGxfI2yj5yqImPhOK4vmIh2xMbGg== - -which-module@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" - integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= - -which@^1.2.9: - version "1.3.1" - resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" - integrity 
sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== - dependencies: - isexe "^2.0.0" - -wide-align@^1.1.0: - version "1.1.3" - resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457" - integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== - dependencies: - string-width "^1.0.2 || 2" - -wordwrap@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" - integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= - -worker-farm@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/worker-farm/-/worker-farm-1.7.0.tgz#26a94c5391bbca926152002f69b84a4bf772e5a8" - integrity sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw== - dependencies: - errno "~0.1.7" - -wrap-ansi@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" - integrity sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU= - dependencies: - string-width "^1.0.1" - strip-ansi "^3.0.1" - -wrap-ansi@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" - integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q== - dependencies: - ansi-styles "^3.2.0" - string-width "^3.0.0" - strip-ansi "^5.0.0" - -wrappy@1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= - -write@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/write/-/write-0.2.1.tgz#5fc03828e264cea3fe91455476f7a3c566cb0757" - integrity sha1-X8A4KOJkzqP+kUVUdvejxWbLB1c= - dependencies: - mkdirp "^0.5.1" - -ws@^6.0.0: - version "6.2.1" - resolved "https://registry.yarnpkg.com/ws/-/ws-6.2.1.tgz#442fdf0a47ed64f59b6a5d8ff130f4748ed524fb" - integrity sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA== - dependencies: - async-limiter "~1.0.0" - -xtend@^4.0.0, xtend@~4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af" - integrity sha1-pcbVMr5lbiPbgg77lDofBJmNY68= - -"y18n@^3.2.1 || ^4.0.0", y18n@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b" - integrity sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w== - -yallist@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" - integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= - -yallist@^3.0.0, yallist@^3.0.2: - version "3.0.3" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.0.3.tgz#b4b049e314be545e3ce802236d6cd22cd91c3de9" - integrity sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A== - -yargs-parser@^11.1.1: - version "11.1.1" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-11.1.1.tgz#879a0865973bca9f6bab5cbdf3b1c67ec7d3bcf4" - integrity sha512-C6kB/WJDiaxONLJQnF8ccx9SEeoTTLek8RVbaOIsrAUS8VrBEXfmeSnCZxygc+XC2sNMBIwOOnfcxiynjHsVSQ== - dependencies: - camelcase "^5.0.0" - decamelize "^1.2.0" - -yargs-parser@^13.1.0: - version "13.1.0" - 
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.0.tgz#7016b6dd03e28e1418a510e258be4bff5a31138f" - integrity sha512-Yq+32PrijHRri0vVKQEm+ys8mbqWjLiwQkMFNXEENutzLPP0bE4Lcd4iA3OQY5HF+GD3xXxf0MEHb8E4/SA3AA== - dependencies: - camelcase "^5.0.0" - decamelize "^1.2.0" - -yargs@12.0.5: - version "12.0.5" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-12.0.5.tgz#05f5997b609647b64f66b81e3b4b10a368e7ad13" - integrity sha512-Lhz8TLaYnxq/2ObqHDql8dX8CJi97oHxrjUcYtzKbbykPtVW9WB+poxI+NM2UIzsMgNCZTIf0AQwsjK5yMAqZw== - dependencies: - cliui "^4.0.0" - decamelize "^1.2.0" - find-up "^3.0.0" - get-caller-file "^1.0.1" - os-locale "^3.0.0" - require-directory "^2.1.1" - require-main-filename "^1.0.1" - set-blocking "^2.0.0" - string-width "^2.0.0" - which-module "^2.0.0" - y18n "^3.2.1 || ^4.0.0" - yargs-parser "^11.1.1" - -yargs@^13.0.0: - version "13.2.4" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.2.4.tgz#0b562b794016eb9651b98bd37acf364aa5d6dc83" - integrity sha512-HG/DWAJa1PAnHT9JAhNa8AbAv3FPaiLzioSjCcmuXXhP8MlpHO5vwls4g4j6n30Z74GVQj8Xa62dWVx1QCGklg== - dependencies: - cliui "^5.0.0" - find-up "^3.0.0" - get-caller-file "^2.0.1" - os-locale "^3.1.0" - require-directory "^2.1.1" - require-main-filename "^2.0.0" - set-blocking "^2.0.0" - string-width "^3.0.0" - which-module "^2.0.0" - y18n "^4.0.0" - yargs-parser "^13.1.0" - -yorkie@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/yorkie/-/yorkie-2.0.0.tgz#92411912d435214e12c51c2ae1093e54b6bb83d9" - integrity sha512-jcKpkthap6x63MB4TxwCyuIGkV0oYP/YRyuQU5UO0Yz/E/ZAu+653/uov+phdmO54n6BcvFRyyt0RRrWdN2mpw== - dependencies: - execa "^0.8.0" - is-ci "^1.0.10" - normalize-path "^1.0.0" - strip-indent "^2.0.0"